[ "class Meta: fields = ( \"id\", \"couverture\", \"usage\", \"millesime\", \"map_color\",", "( \"nom\", \"code_insee\", \"surface\", ) geo_field = \"mpoly\" model =", "\"surface\", ) geo_field = \"mpoly\" model = ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer):", "GeoJSON serializer.\"\"\" class Meta: \"\"\"Marker serializer meta class.\"\"\" fields =", "\"label\", \"is_artificial\", ) model = CouvertureSol class UsageSolSerializer(serializers.ModelSerializer): class Meta:", "label=obj.couverture_label) def get_usage(self, obj): return get_label(code=obj.usage, label=obj.usage_label) class Meta: fields", "get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields = ( \"id\", \"couverture\", \"surface\",", "geo_field = \"mpoly\" model = Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture =", "= Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField()", "\"inconnu\" return f\"{code} {label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018", "( \"id\", \"surface\", ) geo_field = \"mpoly\" model = Sybarval", "serializers from rest_framework import serializers as s from .models import", "return get_label(code=obj.cs_2018, label=obj.cs_2018_label) class Meta: fields = ( \"id\", \"surface\",", "obj): return get_label(code=obj.us_2018, label=obj.us_2018_label) def get_couverture_2015(self, obj): return get_label(code=obj.cs_2015, label=obj.cs_2015_label)", "model = Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON serializer.\"\"\" class Meta:", "usage = s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) def", "geo_field = \"mpoly\" model = Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 =", "\"usage_2018\", \"couverture_2015\", \"couverture_2018\", ) geo_field = \"mpoly\" model = Renaturee2018to2015", "\"couverture\", \"usage\", ) geo_field = \"mpoly\" model = Voirie2018 class", "return get_label(code=obj.usage, label=obj.usage_label) class Meta: fields = ( \"id\", \"surface\",", "s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields", "geo_field = \"mpoly\" model = EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture =", "couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self, obj): return", "\"id\", \"surface\", \"usage_2015\", \"usage_2018\", \"couverture_2015\", \"couverture_2018\", ) geo_field = \"mpoly\"", "= ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\",", "class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015 =", "return get_label(code=obj.usage, label=obj.usage_label) class Meta: fields = ( \"id\", \"couverture\",", "geo_field = \"mpoly\" model = Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture =", "Artificialisee2015to2018, Artificielle2018, CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015, Sybarval, 
Voirie2018, ZonesBaties2018,", "obj): return get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields = ( \"id\",", "get_label(code=obj.couverture, label=obj.couverture_label) def get_usage(self, obj): return get_label(code=obj.usage, label=obj.usage_label) class Meta:", "\"mpoly\" model = Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage", "\"usage\", \"surface\", ) geo_field = \"mpoly\" model = ZonesBaties2018 class", "CouvertureSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\", \"parent\", \"code\", \"label\",", "geo_field = \"mpoly\" model = Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON", "model = ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer): class Meta: fields = (", "= s.SerializerMethodField() def get_usage_2015(self, obj): return get_label(code=obj.us_2015, label=obj.us_2015_label) def get_usage_2018(self,", "= s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) def get_usage(self,", ") geo_field = \"mpoly\" model = EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture", "label=obj.usage_label) class Meta: fields = ( \"id\", \"couverture\", \"usage\", \"millesime\",", "Meta: fields = ( \"id\", \"surface\", ) geo_field = \"mpoly\"", "class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self,", ") geo_field = \"mpoly\" model = ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer): class", "class CouvertureSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\", \"parent\", \"code\",", "fields = ( \"id\", \"parent\", \"code\", \"label\", \"is_artificial\", ) model", "class Meta: fields = ( \"id\", \"surface\", \"couverture\", \"usage\", )", "Meta: fields = ( \"id\", \"parent\", \"code\", \"label\", \"is_artificial\", )", "\"mpoly\" model = CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def", "s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015 = s.SerializerMethodField() couverture_2018 = s.SerializerMethodField()", "\"id\", \"parent\", \"code\", \"label\", \"is_artificial\", ) model = CouvertureSol class", "class Meta: \"\"\"Marker serializer meta class.\"\"\" fields = ( \"nom\",", "get_usage(self, obj): return get_label(code=obj.usage, label=obj.usage_label) class Meta: fields = (", "return get_label(code=obj.couverture, label=obj.couverture_label) def get_usage(self, obj): return get_label(code=obj.usage, label=obj.usage_label) class", "( \"id\", \"surface\", \"couverture\", ) geo_field = \"mpoly\" model =", "OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self, obj):", "\"couverture\", \"usage\", \"millesime\", \"map_color\", \"year\", ) geo_field = \"mpoly\" model", "fields = ( \"id\", \"surface\", ) geo_field = \"mpoly\" model", "UsageSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\", \"parent\", \"code\", \"label\",", "\"id\", \"couverture\", \"usage\", \"millesime\", \"map_color\", \"year\", ) geo_field = \"mpoly\"", "label=\"\"): if code is None: code = \"-\" if label", 
"Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON serializer.\"\"\" class Meta: \"\"\"Marker serializer", "None: label = \"inconnu\" return f\"{code} {label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015", "\"couverture_2015\", \"couverture_2018\", ) geo_field = \"mpoly\" model = Artificialisee2015to2018 class", "\"surface\", \"couverture\", \"usage\", ) geo_field = \"mpoly\" model = Voirie2018", "get_couverture_2018(self, obj): return get_label(code=obj.cs_2018, label=obj.cs_2018_label) class Meta: fields = (", "fields = ( \"id\", \"couverture\", \"usage\", \"millesime\", \"map_color\", \"year\", )", "fields = ( \"id\", \"surface\", \"usage_2015\", \"usage_2018\", \"couverture_2015\", \"couverture_2018\", )", "CouvertureSol class UsageSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\", \"parent\",", "s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) def get_usage(self, obj):", "label=obj.usage_label) class Meta: fields = ( \"id\", \"surface\", \"couverture\", \"usage\",", "def get_usage(self, obj): return get_label(code=obj.usage, label=obj.usage_label) class Meta: fields =", "if code is None: code = \"-\" if label is", "meta class.\"\"\" fields = ( \"nom\", \"code_insee\", \"surface\", ) geo_field", "Meta: fields = ( \"id\", \"surface\", \"couverture\", \"usage\", ) geo_field", "<reponame>MTES-MCT/sparte from rest_framework_gis import serializers from rest_framework import serializers as", "import serializers as s from .models import ( Artificialisee2015to2018, Artificielle2018,", "Voirie2018, ZonesBaties2018, UsageSol, ) def get_label(code=\"\", label=\"\"): if code is", "get_label(code=obj.cs_2015, label=obj.cs_2015_label) def get_couverture_2018(self, obj): return get_label(code=obj.cs_2018, label=obj.cs_2018_label) class Meta:", "return get_label(code=obj.us_2018, label=obj.us_2018_label) def get_couverture_2015(self, obj): return get_label(code=obj.cs_2015, label=obj.cs_2015_label) def", "Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def", "\"mpoly\" model = Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON serializer.\"\"\" class", "fields = ( \"nom\", \"code_insee\", \"surface\", ) geo_field = \"mpoly\"", "\"id\", \"surface\", \"couverture\", \"usage\", ) geo_field = \"mpoly\" model =", "label=obj.usage_label) class Meta: fields = ( \"id\", \"couverture\", \"usage\", \"surface\",", "\"mpoly\" model = ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer): class Meta: fields =", "\"couverture\", ) geo_field = \"mpoly\" model = Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer):", ".models import ( Artificialisee2015to2018, Artificielle2018, CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015,", "Artificielle2018, CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015, Sybarval, Voirie2018, ZonesBaties2018, UsageSol,", "= \"inconnu\" return f\"{code} {label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField()", "label=obj.cs_2015_label) def get_couverture_2018(self, obj): return get_label(code=obj.cs_2018, 
label=obj.cs_2018_label) class Meta: fields", "= s.SerializerMethodField() couverture_2015 = s.SerializerMethodField() couverture_2018 = s.SerializerMethodField() def get_usage_2015(self,", "\"couverture_2018\", ) geo_field = \"mpoly\" model = Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer):", "= \"mpoly\" model = Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField()", "\"id\", \"surface\", ) geo_field = \"mpoly\" model = Sybarval class", "Meta: \"\"\"Marker serializer meta class.\"\"\" fields = ( \"nom\", \"code_insee\",", "model = Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage =", "CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015, Sybarval, Voirie2018, ZonesBaties2018, UsageSol, )", "\"is_artificial\", ) model = CouvertureSol class UsageSolSerializer(serializers.ModelSerializer): class Meta: fields", "fields = ( \"id\", \"surface\", \"couverture\", \"usage\", ) geo_field =", "= ( \"id\", \"surface\", \"couverture\", \"usage\", ) geo_field = \"mpoly\"", "def get_usage_2018(self, obj): return get_label(code=obj.us_2018, label=obj.us_2018_label) def get_couverture_2015(self, obj): return", "= s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015 = s.SerializerMethodField() couverture_2018 =", "class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self,", "Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self, obj):", "\"surface\", ) geo_field = \"mpoly\" model = CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer):", "obj): return get_label(code=obj.cs_2015, label=obj.cs_2015_label) def get_couverture_2018(self, obj): return get_label(code=obj.cs_2018, label=obj.cs_2018_label)", "label=obj.couverture_label) class Meta: fields = ( \"id\", \"couverture\", \"surface\", )", "obj): return get_label(code=obj.couverture, label=obj.couverture_label) def get_usage(self, obj): return get_label(code=obj.usage, label=obj.usage_label)", ") def get_label(code=\"\", label=\"\"): if code is None: code =", "\"couverture\", \"surface\", ) geo_field = \"mpoly\" model = EnveloppeUrbaine2018 class", "Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015 = s.SerializerMethodField()", "( Artificialisee2015to2018, Artificielle2018, CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015, Sybarval, Voirie2018,", ") geo_field = \"mpoly\" model = CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture", "\"mpoly\" model = Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage", ") model = CouvertureSol class UsageSolSerializer(serializers.ModelSerializer): class Meta: fields =", "class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015 =", "= s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self, obj): return 
get_label(code=obj.couverture,", "get_label(code=obj.usage, label=obj.usage_label) class Meta: fields = ( \"id\", \"surface\", \"couverture\",", "s.SerializerMethodField() couverture_2018 = s.SerializerMethodField() def get_usage_2015(self, obj): return get_label(code=obj.us_2015, label=obj.us_2015_label)", "label=obj.cs_2018_label) class Meta: fields = ( \"id\", \"surface\", \"usage_2015\", \"usage_2018\",", "geo_field = \"mpoly\" model = Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture =", "= \"-\" if label is None: label = \"inconnu\" return", "fields = ( \"id\", \"surface\", \"couverture\", ) geo_field = \"mpoly\"", "None: code = \"-\" if label is None: label =", "= ( \"id\", \"surface\", ) geo_field = \"mpoly\" model =", "def get_label(code=\"\", label=\"\"): if code is None: code = \"-\"", "Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015", "def get_usage_2015(self, obj): return get_label(code=obj.us_2015, label=obj.us_2015_label) def get_usage_2018(self, obj): return", "= EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField()", "couverture_2018 = s.SerializerMethodField() def get_usage_2015(self, obj): return get_label(code=obj.us_2015, label=obj.us_2015_label) def", "CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON serializer.\"\"\" class Meta: \"\"\"Marker serializer meta class.\"\"\"", "class Meta: fields = ( \"id\", \"couverture\", \"surface\", ) geo_field", "\"usage\", \"millesime\", \"map_color\", \"year\", ) geo_field = \"mpoly\" model =", "class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta: fields = ( \"id\", \"surface\", )", "class Meta: fields = ( \"id\", \"parent\", \"code\", \"label\", \"is_artificial\",", "class Meta: fields = ( \"id\", \"couverture\", \"usage\", \"surface\", )", "rest_framework_gis import serializers from rest_framework import serializers as s from", "return f\"{code} {label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 =", "rest_framework import serializers as s from .models import ( Artificialisee2015to2018,", "= ( \"nom\", \"code_insee\", \"surface\", ) geo_field = \"mpoly\" model", "from rest_framework_gis import serializers from rest_framework import serializers as s", "( \"id\", \"surface\", \"usage_2015\", \"usage_2018\", \"couverture_2015\", \"couverture_2018\", ) geo_field =", "def get_couverture_2018(self, obj): return get_label(code=obj.cs_2018, label=obj.cs_2018_label) class Meta: fields =", "label=obj.us_2015_label) def get_usage_2018(self, obj): return get_label(code=obj.us_2018, label=obj.us_2018_label) def get_couverture_2015(self, obj):", "Ocsge, Renaturee2018to2015, Sybarval, Voirie2018, ZonesBaties2018, UsageSol, ) def get_label(code=\"\", label=\"\"):", "geo_field = \"mpoly\" model = CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture =", "return get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields = ( \"id\", \"couverture\",", "\"couverture_2015\", \"couverture_2018\", ) geo_field = \"mpoly\" model = Renaturee2018to2015 class", "model = CouvertureSol class UsageSolSerializer(serializers.ModelSerializer): class Meta: fields = (", ") 
geo_field = \"mpoly\" model = Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker", "\"surface\", ) geo_field = \"mpoly\" model = EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer):", "is None: label = \"inconnu\" return f\"{code} {label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer):", "couverture_2015 = s.SerializerMethodField() couverture_2018 = s.SerializerMethodField() def get_usage_2015(self, obj): return", "= s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) class Meta:", "\"mpoly\" model = Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta: fields =", "= ( \"id\", \"couverture\", \"usage\", \"surface\", ) geo_field = \"mpoly\"", "= Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON serializer.\"\"\" class Meta: \"\"\"Marker", "= ( \"id\", \"couverture\", \"usage\", \"millesime\", \"map_color\", \"year\", ) geo_field", "EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label)", "is None: code = \"-\" if label is None: label", "Meta: fields = ( \"id\", \"couverture\", \"usage\", \"surface\", ) geo_field", "class Meta: fields = ( \"id\", \"parent\", \"code\", \"label\", )", ") geo_field = \"mpoly\" model = Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture", "label=obj.couverture_label) class Meta: fields = ( \"id\", \"surface\", \"couverture\", )", "usage_2018 = s.SerializerMethodField() couverture_2015 = s.SerializerMethodField() couverture_2018 = s.SerializerMethodField() def", "serializers as s from .models import ( Artificialisee2015to2018, Artificielle2018, CommunesSybarval,", "Meta: fields = ( \"id\", \"couverture\", \"usage\", \"millesime\", \"map_color\", \"year\",", "\"id\", \"couverture\", \"surface\", ) geo_field = \"mpoly\" model = EnveloppeUrbaine2018", "= \"mpoly\" model = Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField()", "SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta: fields = ( \"id\", \"surface\", ) geo_field", "Meta: fields = ( \"id\", \"surface\", \"couverture\", ) geo_field =", "couverture = s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) class", "( \"id\", \"couverture\", \"usage\", \"millesime\", \"map_color\", \"year\", ) geo_field =", ") geo_field = \"mpoly\" model = Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture", "obj): return get_label(code=obj.usage, label=obj.usage_label) class Meta: fields = ( \"id\",", "s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label)", "from .models import ( Artificialisee2015to2018, Artificielle2018, CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018, Ocsge,", "Sybarval, Voirie2018, ZonesBaties2018, UsageSol, ) def get_label(code=\"\", label=\"\"): if code", "= ( \"id\", \"surface\", \"usage_2015\", \"usage_2018\", \"couverture_2015\", \"couverture_2018\", ) geo_field", "\"usage_2018\", \"couverture_2015\", 
\"couverture_2018\", ) geo_field = \"mpoly\" model = Artificialisee2015to2018", "\"code\", \"label\", \"is_artificial\", ) model = CouvertureSol class UsageSolSerializer(serializers.ModelSerializer): class", "import serializers from rest_framework import serializers as s from .models", "model = Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta: fields = (", "return get_label(code=obj.cs_2015, label=obj.cs_2015_label) def get_couverture_2018(self, obj): return get_label(code=obj.cs_2018, label=obj.cs_2018_label) class", "class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON serializer.\"\"\" class Meta: \"\"\"Marker serializer meta", "from rest_framework import serializers as s from .models import (", "CouvertureSol, EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015, Sybarval, Voirie2018, ZonesBaties2018, UsageSol, ) def", "get_label(code=obj.us_2015, label=obj.us_2015_label) def get_usage_2018(self, obj): return get_label(code=obj.us_2018, label=obj.us_2018_label) def get_couverture_2015(self,", "model = Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage =", "if label is None: label = \"inconnu\" return f\"{code} {label[:30]}\"", "= ( \"id\", \"parent\", \"code\", \"label\", ) model = UsageSol", "return get_label(code=obj.us_2015, label=obj.us_2015_label) def get_usage_2018(self, obj): return get_label(code=obj.us_2018, label=obj.us_2018_label) def", "( \"id\", \"couverture\", \"surface\", ) geo_field = \"mpoly\" model =", "get_couverture_2015(self, obj): return get_label(code=obj.cs_2015, label=obj.cs_2015_label) def get_couverture_2018(self, obj): return get_label(code=obj.cs_2018,", "= ( \"id\", \"couverture\", \"surface\", ) geo_field = \"mpoly\" model", "= Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField()", "= s.SerializerMethodField() couverture_2018 = s.SerializerMethodField() def get_usage_2015(self, obj): return get_label(code=obj.us_2015,", "get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) def get_usage(self, obj): return get_label(code=obj.usage,", "= \"mpoly\" model = EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField()", "\"mpoly\" model = Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018", "def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields =", "get_label(code=obj.usage, label=obj.usage_label) class Meta: fields = ( \"id\", \"couverture\", \"usage\",", "= Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj):", "( \"id\", \"surface\", \"couverture\", \"usage\", ) geo_field = \"mpoly\" model", "class Meta: fields = ( \"id\", \"surface\", ) geo_field =", ") geo_field = \"mpoly\" model = Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class", "= \"mpoly\" model = Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField()", "get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields = ( \"id\", \"surface\", \"couverture\",", 
"Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj): return", "class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture,", "\"surface\", \"usage_2015\", \"usage_2018\", \"couverture_2015\", \"couverture_2018\", ) geo_field = \"mpoly\" model", "get_label(code=\"\", label=\"\"): if code is None: code = \"-\" if", "label = \"inconnu\" return f\"{code} {label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 =", "\"millesime\", \"map_color\", \"year\", ) geo_field = \"mpoly\" model = Ocsge", "obj): return get_label(code=obj.us_2015, label=obj.us_2015_label) def get_usage_2018(self, obj): return get_label(code=obj.us_2018, label=obj.us_2018_label)", "model = EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage =", "\"surface\", ) geo_field = \"mpoly\" model = Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer):", "get_label(code=obj.cs_2018, label=obj.cs_2018_label) class Meta: fields = ( \"id\", \"surface\", \"usage_2015\",", "ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self, obj):", "\"parent\", \"code\", \"label\", \"is_artificial\", ) model = CouvertureSol class UsageSolSerializer(serializers.ModelSerializer):", "Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label)", "= CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj):", "serializer.\"\"\" class Meta: \"\"\"Marker serializer meta class.\"\"\" fields = (", "\"mpoly\" model = Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def", "= \"mpoly\" model = CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField()", "fields = ( \"id\", \"parent\", \"code\", \"label\", ) model =", "serializer meta class.\"\"\" fields = ( \"nom\", \"code_insee\", \"surface\", )", "\"-\" if label is None: label = \"inconnu\" return f\"{code}", "f\"{code} {label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField()", "\"id\", \"surface\", \"couverture\", ) geo_field = \"mpoly\" model = Artificielle2018", "= \"mpoly\" model = Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField()", "Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def", "( \"id\", \"parent\", \"code\", \"label\", \"is_artificial\", ) model = CouvertureSol", "class UsageSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\", \"parent\", \"code\",", "label=obj.us_2018_label) def get_couverture_2015(self, obj): return get_label(code=obj.cs_2015, label=obj.cs_2015_label) def get_couverture_2018(self, 
obj):", "\"\"\"Marker GeoJSON serializer.\"\"\" class Meta: \"\"\"Marker serializer meta class.\"\"\" fields", "= \"mpoly\" model = Artificielle2018 class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer): \"\"\"Marker GeoJSON serializer.\"\"\"", "= ( \"id\", \"parent\", \"code\", \"label\", \"is_artificial\", ) model =", "EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015, Sybarval, Voirie2018, ZonesBaties2018, UsageSol, ) def get_label(code=\"\",", "code = \"-\" if label is None: label = \"inconnu\"", "usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015 = s.SerializerMethodField() couverture_2018", "= Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta: fields = ( \"id\",", "Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta: fields = ( \"id\", \"surface\",", "class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def get_couverture(self,", "\"id\", \"couverture\", \"usage\", \"surface\", ) geo_field = \"mpoly\" model =", "\"surface\", \"couverture\", ) geo_field = \"mpoly\" model = Artificielle2018 class", "\"couverture_2018\", ) geo_field = \"mpoly\" model = Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer):", "geo_field = \"mpoly\" model = Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta:", "UsageSol, ) def get_label(code=\"\", label=\"\"): if code is None: code", "code is None: code = \"-\" if label is None:", "{label[:30]}\" class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015", "get_usage_2018(self, obj): return get_label(code=obj.us_2018, label=obj.us_2018_label) def get_couverture_2015(self, obj): return get_label(code=obj.cs_2015,", "\"usage_2015\", \"usage_2018\", \"couverture_2015\", \"couverture_2018\", ) geo_field = \"mpoly\" model =", "ZonesBaties2018, UsageSol, ) def get_label(code=\"\", label=\"\"): if code is None:", "get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields = (", "= \"mpoly\" model = Renaturee2018to2015 class SybarvalSerializer(serializers.GeoFeatureModelSerializer): class Meta: fields", "\"usage\", ) geo_field = \"mpoly\" model = Voirie2018 class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer):", "get_label(code=obj.us_2018, label=obj.us_2018_label) def get_couverture_2015(self, obj): return get_label(code=obj.cs_2015, label=obj.cs_2015_label) def get_couverture_2018(self,", "\"mpoly\" model = EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage", "= CouvertureSol class UsageSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\",", "Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 = s.SerializerMethodField() couverture_2015 = s.SerializerMethodField()", "ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer): class Meta: fields = ( \"id\", \"parent\",", "( \"id\", \"couverture\", \"usage\", \"surface\", ) geo_field = \"mpoly\" model", ") geo_field = \"mpoly\" model = Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture", "model = 
Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015 = s.SerializerMethodField() usage_2018 =", "\"nom\", \"code_insee\", \"surface\", ) geo_field = \"mpoly\" model = CommunesSybarval", "fields = ( \"id\", \"couverture\", \"surface\", ) geo_field = \"mpoly\"", "class Meta: fields = ( \"id\", \"surface\", \"usage_2015\", \"usage_2018\", \"couverture_2015\",", "as s from .models import ( Artificialisee2015to2018, Artificielle2018, CommunesSybarval, CouvertureSol,", "\"\"\"Marker serializer meta class.\"\"\" fields = ( \"nom\", \"code_insee\", \"surface\",", "class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj): return get_label(code=obj.couverture,", "model = CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self,", "EnveloppeUrbaine2018 class OcsgeSerializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField() def", ") geo_field = \"mpoly\" model = Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer): usage_2015", "fields = ( \"id\", \"couverture\", \"usage\", \"surface\", ) geo_field =", "Meta: fields = ( \"id\", \"surface\", \"usage_2015\", \"usage_2018\", \"couverture_2015\", \"couverture_2018\",", "model = Artificialisee2015to2018 class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self,", "= Sybarval class Voirie2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() usage = s.SerializerMethodField()", "geo_field = \"mpoly\" model = ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer): class Meta:", "obj): return get_label(code=obj.cs_2018, label=obj.cs_2018_label) class Meta: fields = ( \"id\",", "s from .models import ( Artificialisee2015to2018, Artificielle2018, CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018,", "Renaturee2018to2015, Sybarval, Voirie2018, ZonesBaties2018, UsageSol, ) def get_label(code=\"\", label=\"\"): if", "return get_label(code=obj.couverture, label=obj.couverture_label) class Meta: fields = ( \"id\", \"surface\",", "import ( Artificialisee2015to2018, Artificielle2018, CommunesSybarval, CouvertureSol, EnveloppeUrbaine2018, Ocsge, Renaturee2018to2015, Sybarval,", "get_usage_2015(self, obj): return get_label(code=obj.us_2015, label=obj.us_2015_label) def get_usage_2018(self, obj): return get_label(code=obj.us_2018,", "def get_couverture_2015(self, obj): return get_label(code=obj.cs_2015, label=obj.cs_2015_label) def get_couverture_2018(self, obj): return", "class Meta: fields = ( \"id\", \"surface\", \"couverture\", ) geo_field", "= ( \"id\", \"surface\", \"couverture\", ) geo_field = \"mpoly\" model", "class.\"\"\" fields = ( \"nom\", \"code_insee\", \"surface\", ) geo_field =", "def get_couverture(self, obj): return get_label(code=obj.couverture, label=obj.couverture_label) def get_usage(self, obj): return", "\"year\", ) geo_field = \"mpoly\" model = Ocsge class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer):", "\"couverture\", \"usage\", \"surface\", ) geo_field = \"mpoly\" model = ZonesBaties2018", "s.SerializerMethodField() def get_usage_2015(self, obj): return get_label(code=obj.us_2015, label=obj.us_2015_label) def get_usage_2018(self, obj):", "label is None: label = 
\"inconnu\" return f\"{code} {label[:30]}\" class", "\"map_color\", \"year\", ) geo_field = \"mpoly\" model = Ocsge class", "s.SerializerMethodField() couverture_2015 = s.SerializerMethodField() couverture_2018 = s.SerializerMethodField() def get_usage_2015(self, obj):", "= \"mpoly\" model = ZonesBaties2018 class CouvertureSolSerializer(serializers.ModelSerializer): class Meta: fields", "\"code_insee\", \"surface\", ) geo_field = \"mpoly\" model = CommunesSybarval class", "Meta: fields = ( \"id\", \"parent\", \"code\", \"label\", ) model", "Meta: fields = ( \"id\", \"couverture\", \"surface\", ) geo_field =", "CommunesSybarval class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer): couverture = s.SerializerMethodField() def get_couverture(self, obj): return" ]
[ "from django.contrib import admin from .models import SearchResult # Register", "your models here. class SearchResultAdmin(admin.ModelAdmin): fields = [\"query\", \"heading\", \"url\",", "import admin from .models import SearchResult # Register your models", "import SearchResult # Register your models here. class SearchResultAdmin(admin.ModelAdmin): fields", "# Register your models here. class SearchResultAdmin(admin.ModelAdmin): fields = [\"query\",", "django.contrib import admin from .models import SearchResult # Register your", "Register your models here. class SearchResultAdmin(admin.ModelAdmin): fields = [\"query\", \"heading\",", "here. class SearchResultAdmin(admin.ModelAdmin): fields = [\"query\", \"heading\", \"url\", \"text\"] admin.site.register(SearchResult,", "class SearchResultAdmin(admin.ModelAdmin): fields = [\"query\", \"heading\", \"url\", \"text\"] admin.site.register(SearchResult, SearchResultAdmin)", ".models import SearchResult # Register your models here. class SearchResultAdmin(admin.ModelAdmin):", "SearchResult # Register your models here. class SearchResultAdmin(admin.ModelAdmin): fields =", "models here. class SearchResultAdmin(admin.ModelAdmin): fields = [\"query\", \"heading\", \"url\", \"text\"]", "from .models import SearchResult # Register your models here. class", "admin from .models import SearchResult # Register your models here." ]
[ "nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty()", "output_path: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text]", "train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter", "was found. Only an nlu-model was created.\" \"Please specify a", "= await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists(", "rasa.importers.importer import TrainingDataImporter from rasa import model from rasa.model import", "persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ): if", "training data given. Please provide stories and NLU data in", "# training NLU only hence the training files still have", "to retrain Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data(", "[l for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core =", "config: Text, stories: Text, output: Text, train_path: Optional[Text] = None,", "if domain.is_empty(): print_error( \"Core training was skipped because no valid", "None, ) -> Optional[Text]: \"\"\"Train Core with validated training and", "Core model will be trained.\") # return await _train_core_with_validated_data( #", "Rasa Core model will be trained.\") # return await _train_core_with_validated_data(", "Core with validated training and config data.\"\"\" import rasa.core.train with", "List[Text]] = True ) -> Optional[Text]: \"\"\"Train NLU with validated", "persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path:", "\" \"the updated templates will be created.\", color=bcolors.OKBLUE, ) await", "argument or check if the provided domain file exists.\" )", "import asyncio import os import tempfile from contextlib import ExitStack", "tempfile from contextlib import ExitStack from typing import Text, Optional,", "Please provide NLU data in order to train \" \"a", "file_importer = BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain()", "the config file for Core. stories: Path to the Core", "fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict]", "\"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer: TrainingDataImporter, output_path:", "data has not changed. fixed_model_name: Name of model to be", "of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await", "output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter,", "def _train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text]", "If `True` retrain model even if data has not changed.", "exit. 
_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await", "_train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data(", "the model will be trained in a temporary directory, otherwise", "will be trained.\") # return await _train_core_with_validated_data( # file_importer, #", "trained model files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async(", "fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain, Text],", "bool = False, ): if not nlu_data: print_error( \"No NLU", "NLU. output_path: Output path. force_training: If `True` retrain model even", "for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core", "domain, training_files # ) with ExitStack() as stack: train_path =", "supplies the training data. train_path: Directory in which to train", "model will not be compressed. additional_arguments: Additional training parameters. Returns:", "still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data]", "_train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training was skipped", "= FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name,", "created.\", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core stories/configuration", "# ) new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison", "training data. output: Output path. train_path: If `None` the model", "# </ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path,", "train_path is None: # Only Core was trained. new_fingerprint =", "Otherwise, create a temp train path and clean it up", "model archive, otherwise the path to the directory with the", "validated training and config data.\"\"\" import rasa.nlu.train with ExitStack() as", "format. \" \"The NLU model training will be skipped now.\"", "TrainingDataImporter from rasa import model from rasa.model import FingerprintComparisonResult from", "old_model async def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text,", "`True` if the NLU training data should be persisted with", "additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core", "= None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if", "_train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] =", "argument.\" ) return # training NLU only hence the training", "data has not changed. persist_nlu_training_data: `True` if the NLU training", "model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return", "files. 
\"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data,", "data format. \" \"The NLU model training will be skipped", "with the model. additional_arguments: Additional training parameters. Returns: Path of", "loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return", "is None: # Only Core was trained. new_fingerprint = await", "models[lang], _ = await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data,", "to the domain file. config: Path to the config file", "stories given. Please provide stories in order to \" \"train", "False, additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result", "domain file exists.\" ) return None if not await file_importer.get_stories():", "Use only from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the", "persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name", "NLU model.\", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text], config:", "asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME),", "only from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training", ") def train_core( domain: Union[Domain, Text], config: Text, stories: Text,", "True ) -> Optional[Text]: \"\"\"Train NLU with validated training and", "domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name #", "training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def", "color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if", "TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty():", "completed.\", color=bcolors.OKBLUE) if train_path is None: # Only NLU was", "\"train a Rasa Core model using the '--stories' argument.\" )", "the provided domain file exists.\" ) return nlu_model_only async def", "output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None,", "Path to the config file for NLU. nlu_data: Path to", ") domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training", "_train_path = train_path else: # Otherwise, create a temp train", "config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data for", "Optional[Dict] = None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return", "bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train(", "changed. 
A new model with \" \"the updated templates will", "No need to retrain NLU model.\", color=bcolors.OKBLUE, ) def train_core(", "file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid NLU", "trained.\") # return await _train_core_with_validated_data( # file_importer, # output=output_path, #", "fixed_model_name # ) # /bf mod return await _train_async_internal( file_importer,", "= asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete(", "Optional[Text] = None, persist_nlu_training_data: bool = False, ): if not", "from rasa.importers.importer import TrainingDataImporter from rasa import model from rasa.model", "# normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain,", "rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color, ) from", "train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output:", "output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config:", "skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\",", "file_importer.get_stories(): print_error( \"No stories given. Please provide stories in order", "train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can use the", "directory, otherwise in the provided directory. fixed_model_name: Name of the", "print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is None: #", "/bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is", "to the domain file. config: Dict of paths to the", "{}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty()", "if train_path is None: # Only NLU was trained new_fingerprint", "the provided directory. fixed_model_name: Name of the model to be", "the model. Returns: If `train_path` is given it returns the", "train_path: Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool,", "from typing import Text, Optional, List, Union, Dict from rasa.importers.importer", "data/configuration did not change. No need to retrain NLU model.\",", "new model with \" \"the updated templates will be created.\",", "= await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else:", "selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await", "the trained model files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete(", ") async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text]", "training since domain or stories are empty.\", color=bcolors.OKBLUE) for lang", "on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training", "stories: Path to the Core training data. 
output: Output path.", "import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files:", "Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Core model.", "domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training", "model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only", "train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool", "is None: try: loop = asyncio.get_event_loop() except RuntimeError: loop =", "it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod", "file exists.\" ) return None if not await file_importer.get_stories(): print_error(", "nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()]", "None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core", "= False, ) -> Optional[Text]: \"\"\"Trains an NLU model. Args:", "Core training data. output: Output path. train_path: If `None` the", "nlu_data.is_empty(): # print_warning(\"No NLU data present. Just a Rasa Core", "async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only =", "None if not await file_importer.get_stories(): print_error( \"No stories given. Please", "stored. persist_nlu_training_data: `True` if the NLU training data should be", "> if fingerprint_comparison.nlu == True: # replace True with list", "if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, )", "compressed. additional_arguments: Additional training parameters. Returns: If `train_path` is given", "import model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain", "None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments:", "file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer,", "Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories,", "rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error,", "no valid domain file was found. \" \"Please specify a", "You can use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model))", "langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable", "return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"nlu-\", ) return _train_path", "will be created.\", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path) else: print_color(", "templates section has been changed. 
A new model with \"", "skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await", "be created.\", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core", "fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text,", "print_error( # \"No training data given. Please provide stories and", "[l for l in fingerprint_comparison.nlu if l not in nlu_untrainable]", ") -> Optional[Text]: \"\"\"Train Core with validated training and config", "fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path,", "an NLU model. Args: config: Path to the config file", "from rasa import model from rasa.model import FingerprintComparisonResult from rasa.core.domain", "fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. \" \"Only the", "persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data for language <{}> didn't change,", "model archive. \"\"\" # file_importer = TrainingDataImporter.load_from_config( # config, domain,", "# If the train path was provided, do nothing on", "old_model, train_path ) # bf mod > if fingerprint_comparison.nlu ==", "= None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete(", "output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain:", "and config data.\"\"\" import rasa.nlu.train with ExitStack() as stack: models", "it returns the path to the model archive, otherwise the", "Text, config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH,", "temp train path and clean it up on exit. _train_path", "NLU training data. output: Output path. train_path: If `None` the", "for Core and NLU. Keys are language codes training_files: Paths", "\" \"Only the templates section has been changed. A new", "typing import Text, Optional, List, Union, Dict from rasa.importers.importer import", "await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg():", "updated templates will be created.\", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path)", "Core. stories: Path to the Core training data. output: Output", "will not be compressed. additional_arguments: Additional training parameters. Returns: If", "output_path, fixed_model_name # ) # /bf mod return await _train_async_internal(", "to the directory with the trained model files. 
\"\"\" file_importer", "provided domain file exists.\" ) return None if not await", "as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers", "DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data:", "Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "model.should_retrain( new_fingerprint, old_model, train_path ) # bf mod > if", "= {} from rasa.nlu import config as cfg_loader if train_path:", "train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None,", "\"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain =", "`True` retrain model even if data has not changed. fixed_model_name:", "additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change.", "import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath", "Just a Rasa Core model will be trained.\") # return", "= None, ) -> Optional[Text]: \"\"\"Trains a Core model. Args:", "= TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if", "language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod", "import rasa.nlu.train with ExitStack() as stack: models = {} from", "print_success( \"Nothing changed. You can use the old model stored", "on exit. _train_path = train_path else: # Otherwise, create a", "Union, Dict from rasa.importers.importer import TrainingDataImporter from rasa import model", "in order to train \" \"a Rasa NLU model using", "the '--nlu' argument.\" ) return # training NLU only hence", "fixed_model_name: Name of the model to be stored. persist_nlu_training_data: `True`", "model will be trained in a temporary directory, otherwise in", "stories in order to \" \"train a Rasa Core model", "should be persisted with the model. fixed_model_name: Name of model", "domain.is_empty(): print_error( \"Core training was skipped because no valid domain", "for language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf", "# /bf mod return await _train_async_internal( file_importer, train_path, output_path, force_training,", "False, ) -> Optional[Text]: \"\"\"Trains an NLU model. Args: config:", "await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l", "A new model with \" \"the updated templates will be", "NLU training data should be persisted with the model. fixed_model_name:", "with ExitStack() as stack: if train_path: # If the train", "lang in nlu_untrainable: print_color(\"No NLU data found for language <{}>,", "output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration", "train_path: If `None` the model will be trained in a", "None, persist_nlu_training_data: bool = False, ) -> Optional[Text]: \"\"\"Trains an", "None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ):", "train a Rasa model using the '--data' argument.\" # )", "# print_warning(\"No NLU data present. 
Just a Rasa Core model", ") return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text,", "provided domain file exists.\" ) return nlu_model_only async def _train_async_internal(", "= await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was skipped", "if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data(", "await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core stories/configuration did not change.", ") else: print_color( \"NLU data/configuration did not change. No need", "# replace True with list of all langs fingerprint_comparison.nlu =", "do nothing on exit. _train_path = train_path else: # Otherwise,", "if stories.is_empty() and nlu_data.is_empty(): # print_error( # \"No training data", "of model to be stored. additional_arguments: Additional training parameters. Returns:", "nothing on exit. _train_path = train_path else: # Otherwise, create", "compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain, config = await", "None: # Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer)", "fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path,", "Path to the NLU training data. output: Output path. train_path:", "l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and", "now.\" ) return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name,", "import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) # domain =", "l in fingerprint_comparison.nlu if l not in nlu_untrainable] if core_untrainable:", "import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import (", "file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l for", "# Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return", "directory, otherwise in the provided directory. fixed_model_name: Name of model", "skipped because no valid domain file was found. \" \"Please", "the provided domain file exists.\" ) return None if not", "didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model", "handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data(", "train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did", ") else: print_color(\"NLU data for language <{}> didn't change, skipping", "Paths to the training data for Core and NLU. 
output_path:", "( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants import", "<{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required():", "True: # replace True with list of all langs fingerprint_comparison.nlu", "model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain(", "_train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data(", "stories and NLU data in \" # \"order to train", "RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config,", "be stored. persist_nlu_training_data: `True` if the NLU training data should", ") -> Optional[Text]: \"\"\"Trains a Core model. Args: domain: Path", ") training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}'", "rasa.core.train with ExitStack() as stack: if train_path: # If the", "train_path: # If the train path was provided, do nothing", "of model to be stored. persist_nlu_training_data: `True` if the NLU", "TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path,", "for lang in config: if config[lang]: print_color(\"Start training {} NLU", "using the '--stories' argument.\" ) return return await _train_core_with_validated_data( file_importer,", "been changed. A new model with \" \"the updated templates", "create a temp train path and clean it up on", "train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config: Text,", "ExitStack from typing import Text, Optional, List, Union, Dict from", "train_core( domain: Union[Domain, Text], config: Text, stories: Text, output: Text,", "config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error(", "model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await rasa.nlu.train( config[lang],", "nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core", "Core model...\", color=bcolors.OKBLUE) domain, config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config()", "Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success,", "clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal", "with the trained model files. \"\"\" loop = asyncio.get_event_loop() return", "to train a Rasa model using the '--data' argument.\" #", "= list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty()", "# bf mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config,", "Name of model to be stored. 
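
# Note on the "bf mod" block above: the stock
# `TrainingDataImporter.load_from_config` call is kept commented out for
# reference and replaced by `BotfrontFileImporter`, so that a single project
# can carry one NLU pipeline per language. The original empty-domain early
# exit is also disabled; emptiness is instead handled per part in
# `_train_async_internal` (Core is skipped when domain or stories are empty,
# and NLU is skipped per language without data).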

async def handle_domain_if_not_exists(
    file_importer: TrainingDataImporter, output_path, fixed_model_name
):
    nlu_model_only = await _train_nlu_with_validated_data(
        file_importer, output=output_path, fixed_model_name=fixed_model_name
    )
    print_warning(
        "Core training was skipped because no valid domain file was found. "
        "Only an nlu-model was created. "
        "Please specify a valid domain using the '--domain' argument or "
        "check if the provided domain file exists."
    )
    return nlu_model_only

async def _train_async_internal(
    file_importer: TrainingDataImporter,
    train_path: Text,
    output_path: Text,
    force_training: bool,
    fixed_model_name: Optional[Text],
    persist_nlu_training_data: bool,
    additional_arguments: Optional[Dict],
) -> Optional[Text]:
    """Trains a Rasa model (Core and NLU). Use only from `train_async`.

    Args:
        file_importer: `TrainingDataImporter` which supplies the training data.
        train_path: Directory in which to train the model.
        output_path: Output path.
        force_training: If `True` retrain model even if data has not changed.
        persist_nlu_training_data: `True` if the NLU training data should be
            persisted with the model.
        fixed_model_name: Name of model to be stored.
        additional_arguments: Additional training parameters.

    Returns:
        Path of the trained model archive.
    """
    stories, nlu_data = await asyncio.gather(
        file_importer.get_stories(), file_importer.get_nlu_data()
    )

    # if stories.is_empty() and nlu_data.is_empty():
    #     print_error(
    #         "No training data given. Please provide stories and NLU data in "
    #         "order to train a Rasa model using the '--data' argument."
    #     )
    #     return

    # if nlu_data.is_empty():
    #     print_warning("No NLU data present. Just a Rasa Core model will be trained.")
    #     return await _train_core_with_validated_data(
    #         file_importer,
    #         output=output_path,
    #         fixed_model_name=fixed_model_name,
    #         additional_arguments=additional_arguments,
    #     )

    new_fingerprint = await model.model_fingerprint(file_importer)
    old_model = model.get_latest_model(output_path)
    fingerprint_comparison = FingerprintComparisonResult(force_training=force_training)
    if not force_training:
        fingerprint_comparison = model.should_retrain(
            new_fingerprint, old_model, train_path
        )

    # bf mod >
    if fingerprint_comparison.nlu is True:  # replace True with list of all langs
        fingerprint_comparison.nlu = list(new_fingerprint.get("nlu-config", {}).keys())
    domain = await file_importer.get_domain()
    core_untrainable = domain.is_empty() or stories.is_empty()
    nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()]
    fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable
    fingerprint_comparison.nlu = [
        l for l in fingerprint_comparison.nlu if l not in nlu_untrainable
    ]

    if core_untrainable:
        print_color(
            "Skipping Core training since domain or stories are empty.",
            color=bcolors.OKBLUE,
        )
    for lang in nlu_untrainable:
        print_color(
            "No NLU data found for language <{}>, skipping training...".format(lang),
            color=bcolors.OKBLUE,
        )
    # </ bf mod

    if fingerprint_comparison.is_training_required():
        await _do_training(
            file_importer,
            output_path=output_path,
            train_path=train_path,
            fingerprint_comparison_result=fingerprint_comparison,
            fixed_model_name=fixed_model_name,
            persist_nlu_training_data=persist_nlu_training_data,
            additional_arguments=additional_arguments,
        )
        return model.package_model(
            fingerprint=new_fingerprint,
            output_directory=output_path,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
        )

    print_success(
        "Nothing changed. You can use the old model stored at '{}'."
        "".format(os.path.abspath(old_model))
    )
    return old_model
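
# Sketch of the "bf mod" gating above (illustrative values). After the block,
# `fingerprint_comparison.nlu` is a list of language codes to retrain rather
# than a bool. For instance, with languages "en" and "fr" where the "en" NLU
# data is empty:
#
#     fingerprint_comparison.nlu   # e.g. ["fr"] -- "en" filtered out
#     fingerprint_comparison.core  # False when domain or stories are empty
#
# A non-empty list keeps `is_training_required()` truthy, so only the
# affected languages are retrained; otherwise the old archive is reused.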

async def _do_training(
    file_importer: TrainingDataImporter,
    output_path: Text,
    train_path: Text,
    fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    additional_arguments: Optional[Dict] = None,
):
    if not fingerprint_comparison_result:
        fingerprint_comparison_result = FingerprintComparisonResult()

    if fingerprint_comparison_result.should_retrain_core():
        await _train_core_with_validated_data(
            file_importer,
            output=output_path,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
            additional_arguments=additional_arguments,
        )
    elif fingerprint_comparison_result.should_retrain_nlg():
        print_color(
            "Core stories/configuration did not change. "
            "Only the templates section has been changed. A new model with "
            "the updated templates will be created.",
            color=bcolors.OKBLUE,
        )
        await model.update_model_with_new_domain(file_importer, train_path)
    else:
        print_color(
            "Core stories/configuration did not change. "
            "No need to retrain Core model.",
            color=bcolors.OKBLUE,
        )

    if fingerprint_comparison_result.should_retrain_nlu():
        await _train_nlu_with_validated_data(
            file_importer,
            output=output_path,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
            retrain_nlu=fingerprint_comparison_result.nlu,
            persist_nlu_training_data=persist_nlu_training_data,
        )
    else:
        print_color(
            "NLU data/configuration did not change. "
            "No need to retrain NLU model.",
            color=bcolors.OKBLUE,
        )
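
# Minimal sketch of driving `_do_training` directly (assumes the rasa 1.x
# `FingerprintComparisonResult` keyword arguments `core`/`nlg`; the values
# below are hypothetical):
#
#     comparison = FingerprintComparisonResult(core=False, nlg=False)
#     comparison.nlu = ["en"]  # bf fork: list of languages to retrain
#     await _do_training(
#         file_importer,
#         output_path="models/",
#         train_path=train_dir,  # a writable training directory
#         fingerprint_comparison_result=comparison,
#     )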

def train_core(
    domain: Union[Domain, Text],
    config: Text,
    stories: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(
        train_core_async(
            domain=domain,
            config=config,
            stories=stories,
            output=output,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
            additional_arguments=additional_arguments,
        )
    )

async def train_core_async(
    domain: Union[Domain, Text],
    config: Text,
    stories: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    """Trains a Core model.

    Args:
        domain: Path to the domain file.
        config: Path to the config file for Core.
        stories: Path to the Core training data.
        output: Output path.
        train_path: If `None` the model will be trained in a temporary
            directory, otherwise in the provided directory.
        fixed_model_name: Name of model to be stored.
        additional_arguments: Additional training parameters.

    Returns:
        If `train_path` is given it returns the path to the model archive,
        otherwise the path to the directory with the trained model files.
    """
    file_importer = TrainingDataImporter.load_core_importer_from_config(
        config, domain, [stories]
    )
    domain = await file_importer.get_domain()
    if domain.is_empty():
        print_error(
            "Core training was skipped because no valid domain file was found. "
            "Please specify a valid domain using the '--domain' argument or "
            "check if the provided domain file exists."
        )
        return None

    if not await file_importer.get_stories():
        print_error(
            "No stories given. Please provide stories in order to "
            "train a Rasa Core model using the '--stories' argument."
        )
        return

    return await _train_core_with_validated_data(
        file_importer,
        output=output,
        train_path=train_path,
        fixed_model_name=fixed_model_name,
        additional_arguments=additional_arguments,
    )

async def _train_core_with_validated_data(
    file_importer: TrainingDataImporter,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    """Train Core with validated training and config data."""

    import rasa.core.train

    with ExitStack() as stack:
        if train_path:
            # If the train path was provided, do nothing on exit.
            _train_path = train_path
        else:
            # Otherwise, create a temp train path and clean it up on exit.
            _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))

        # normal (not compare) training
        print_color("Training Core model...", color=bcolors.OKBLUE)
        domain, config = await asyncio.gather(
            file_importer.get_domain(), file_importer.get_config()
        )
        await rasa.core.train(
            domain_file=domain,
            training_resource=file_importer,
            output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME),
            policy_config=config,
            additional_arguments=additional_arguments,
        )
        print_color("Core model training completed.", color=bcolors.OKBLUE)

        if train_path is None:
            # Only Core was trained.
            new_fingerprint = await model.model_fingerprint(file_importer)
            return model.package_model(
                fingerprint=new_fingerprint,
                output_directory=output,
                train_path=_train_path,
                fixed_model_name=fixed_model_name,
                model_prefix="core-",
            )

        return _train_path
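
# Illustrative only (hypothetical paths): training Core on its own. With
# `train_path=None`, `_train_core_with_validated_data` packages the result as
# a "core-" prefixed model archive under `output`:
#
#     core_model = train_core(
#         domain="domain.yml",
#         config="config-en.yml",
#         stories="data/stories.md",
#         output="models/",
#     )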

def train_nlu(
    config: Text,
    nlu_data: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
) -> Optional[Text]:
    """Trains an NLU model.

    Args:
        config: Path to the config file for NLU.
        nlu_data: Path to the NLU training data.
        output: Output path.
        train_path: If `None` the model will be trained in a temporary
            directory, otherwise in the provided directory.
        fixed_model_name: Name of the model to be stored.
        persist_nlu_training_data: `True` if the NLU training data should be
            persisted with the model.

    Returns:
        If `train_path` is given it returns the path to the model archive,
        otherwise the path to the directory with the trained model files.
    """
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(
        _train_nlu_async(
            config,
            nlu_data,
            output,
            train_path,
            fixed_model_name,
            persist_nlu_training_data,
        )
    )

async def _train_nlu_async(
    config: Text,
    nlu_data: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
):
    if not nlu_data:
        print_error(
            "No NLU data given. Please provide NLU data in order to train "
            "a Rasa NLU model using the '--nlu' argument."
        )
        return

    # training NLU only hence the training files still have to be selected
    file_importer = TrainingDataImporter.load_nlu_importer_from_config(
        config, training_data_paths=[nlu_data]
    )

    training_datas = await file_importer.get_nlu_data()
    if training_datas.is_empty():
        print_error(
            f"Path '{nlu_data}' doesn't contain valid NLU data in it. "
            "Please verify the data format. "
            "The NLU model training will be skipped now."
        )
        return

    return await _train_nlu_with_validated_data(
        file_importer,
        output=output,
        train_path=train_path,
        fixed_model_name=fixed_model_name,
        persist_nlu_training_data=persist_nlu_training_data,
    )
config: Dict of paths to the config for", "Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool", "exists.\" ) return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path:", "training data should be persisted with the model. additional_arguments: Additional", "\"No training data given. Please provide stories and NLU data", "model with \" \"the updated templates will be created.\", color=bcolors.OKBLUE,", "model even if data has not changed. fixed_model_name: Name of", "otherwise in the provided directory. fixed_model_name: Name of the model", "need to retrain Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await", "Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path,", "the training data for Core and NLU. output_path: Output path.", "# file_importer, output_path, fixed_model_name # ) # /bf mod return", "paths to the config for Core and NLU. Keys are", "= None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None,", "did not change. No need to retrain NLU model.\", color=bcolors.OKBLUE,", "'--nlu' argument.\" ) return # training NLU only hence the", "be stored. additional_arguments: Additional training parameters. Returns: Path of the", "= stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core model...\",", "await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU", "empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU data found", "to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas", "try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop)", "output_path: Output path. force_training: If `True` retrain model even if", "persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa", "return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path:", "async def train_core_async( domain: Union[Domain, Text], config: Text, stories: Text,", "the training files still have to be selected file_importer =", "model files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config,", "rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data", "clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf", "to the config for Core and NLU. 
Keys are language", "file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data for language", "be skipped now.\" ) return return await _train_nlu_with_validated_data( file_importer, output=output,", "Text, Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter from", "Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool = False,", "archive, otherwise the path to the directory with the trained", "files. \"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain", "config = await file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]:", "-> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Use", "in order to \" \"train a Rasa Core model using", "os import tempfile from contextlib import ExitStack from typing import", "</ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path,", "model files. \"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] )", "train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did", "if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu =", "...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await rasa.nlu.train( config[lang], file_importer,", "= None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains", "fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config: Text, nlu_data:", "Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Args: domain:", "train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output: Text", "verify the data format. \" \"The NLU model training will", "retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not change.", "import ( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants", "valid domain file was found. Only an nlu-model was created.\"", "Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments:", "model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from", "with \" \"the updated templates will be created.\", color=bcolors.OKBLUE, )", ") print_warning( \"Core training was skipped because no valid domain", "Args: domain: Path to the domain file. config: Dict of", "fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name,", "can use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) )", "-> Optional[Text]: \"\"\"Train NLU with validated training and config data.\"\"\"", "in the provided directory. fixed_model_name: Name of the model to", "directory. fixed_model_name: Name of model to be stored. uncompress: If", "Optional[Text]: \"\"\"Trains a Core model. 
Args: domain: Path to the", "NLU was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint,", "NLU model using the '--nlu' argument.\" ) return # training", "model to be stored. persist_nlu_training_data: `True` if the NLU training", "valid domain using '--domain' argument or check if the provided", "print_error( f\"Path '{nlu_data}' doesn't contain valid NLU data in it.", "from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data.", "None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, )", "NLU data present. Just a Rasa Core model will be", "loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop is", "which to train the model. output_path: Output path. force_training: If", "training data for Core and NLU. output_path: Output path. force_training:", "_train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color(", "additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Core", "data.\"\"\" import rasa.nlu.train with ExitStack() as stack: models = {}", "stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter file_importer =", "force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter,", "exists.\" ) return None if not await file_importer.get_stories(): print_error( \"No", "fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text,", "= False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None,", "import Text, Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter", "BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) # domain = await", "training data should be persisted with the model. Returns: If", "order to train \" \"a Rasa NLU model using the", "{} from rasa.nlu import config as cfg_loader if train_path: #", "a Rasa Core model using the '--stories' argument.\" ) return", "contain valid NLU data in it. \" \"Please verify the", "stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import", "def train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text,", "to the directory with the trained model files. \"\"\" loop", "# output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint =", "not changed. fixed_model_name: Name of model to be stored. persist_nlu_training_data:", "= None, ) -> Optional[Text]: \"\"\"Train Core with validated training", "return # training NLU only hence the training files still", "output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration", "to be stored. 
persist_nlu_training_data: `True` if the NLU training data", ") -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain,", "None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, loop:", "model training will be skipped now.\" ) return return await", "will be trained in a temporary directory, otherwise in the", "data in it. \" \"Please verify the data format. \"", "additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is", "return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data,", "be stored. uncompress: If `True` the model will not be", "is given it returns the path to the model archive,", "the trained model archive. \"\"\" # file_importer = TrainingDataImporter.load_from_config( #", "a Rasa Core model will be trained.\") # return await", "# return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # )", "bool = False, additional_arguments: Optional[Dict] = None, ): if not", "asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data,", "\"a Rasa NLU model using the '--nlu' argument.\" ) return", "model (Core and NLU). Args: domain: Path to the domain", "`TrainingDataImporter` which supplies the training data. train_path: Directory in which", "file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer:", "domain, [stories] ) domain = await file_importer.get_domain() if domain.is_empty(): print_error(", "training parameters. Returns: Path of the trained model archive. \"\"\"", "training and config data.\"\"\" import rasa.core.train with ExitStack() as stack:", "): if not nlu_data: print_error( \"No NLU data given. Please", ") async def _train_nlu_async( config: Text, nlu_data: Text, output: Text,", "hence the training files still have to be selected file_importer", "\"\"\"Trains a Rasa model (Core and NLU). Use only from", "= None, persist_nlu_training_data: bool = False, ) -> Optional[Text]: \"\"\"Trains", "Returns: If `train_path` is given it returns the path to", ") return # training NLU only hence the training files", "await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path,", "not nlu_data: print_error( \"No NLU data given. Please provide NLU", "'--domain' argument or check if the provided domain file exists.\"", "otherwise in the provided directory. fixed_model_name: Name of model to", "Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train NLU with", "print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is None: #", "fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu(", "NLU training data should be persisted with the model. 
additional_arguments:", "core_untrainable: print_color(\"Skipping Core training since domain or stories are empty.\",", "await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training was", "for lang in nlu_untrainable: print_color(\"No NLU data found for language", "change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training", "import rasa.core.train with ExitStack() as stack: if train_path: # If", "has not changed. persist_nlu_training_data: `True` if the NLU training data", "list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or", "nlu_untrainable: print_color(\"No NLU data found for language <{}>, skipping training...\".format(lang),", "from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils", "bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool =", "Only an nlu-model was created.\" \"Please specify a valid domain", "train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]],", "{} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await", "domain: Union[Domain, Text], config: Text, stories: Text, output: Text, train_path:", "color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU data found for", "no valid domain file was found. Only an nlu-model was", ") return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing", "bf mod > if fingerprint_comparison.nlu == True: # replace True", "the model. additional_arguments: Additional training parameters. Returns: Path of the", ") elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. \"", "\"Core stories/configuration did not change. \" \"Only the templates section", "persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, )", "Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training:", "changed. fixed_model_name: Name of model to be stored. persist_nlu_training_data: `True`", "None: # Only NLU was trained new_fingerprint = await model.model_fingerprint(file_importer)", "fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool", "data.\"\"\" import rasa.core.train with ExitStack() as stack: if train_path: #", "Returns: Path of the trained model archive. \"\"\" stories, nlu_data", "l not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since", "= asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output,", "domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable", "retrain model even if data has not changed. 
persist_nlu_training_data: `True`", "# ) # /bf mod return await _train_async_internal( file_importer, train_path,", "return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async", "output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE)", "in config: if config[lang]: print_color(\"Start training {} NLU model ...\".format(lang),", "No need to retrain Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu():", "print_color( \"Core stories/configuration did not change. No need to retrain", "FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path", "the config for Core and NLU. Keys are language codes", "the directory with the trained model files. \"\"\" file_importer =", "domain file was found. \" \"Please specify a valid domain", "fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain,", "provide NLU data in order to train \" \"a Rasa", "trained in a temporary directory, otherwise in the provided directory.", "\"order to train a Rasa model using the '--data' argument.\"", "check if the provided domain file exists.\" ) return nlu_model_only", "Optional[Text]: \"\"\"Train NLU with validated training and config data.\"\"\" import", "data given. Please provide stories and NLU data in \"", "persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop]", "TrainingDataImporter.load_from_config( # config, domain, training_files # ) with ExitStack() as", "parameters. Returns: If `train_path` is given it returns the path", "Union[Domain, Text], config: Text, stories: Text, output: Text, train_path: Optional[Text]", "train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) )", "data given. Please provide NLU data in order to train", "= await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable =", "or stories.is_empty() nlu_untrainable = [l for l, d in nlu_data.items()", "trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path,", "output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint = await", "= asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path,", "argument.\" ) return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name,", "not await file_importer.get_stories(): print_error( \"No stories given. 
Please provide stories", "DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files: Union[Text, List[Text]],", "Please provide stories and NLU data in \" # \"order", "found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf", "of model to be stored. uncompress: If `True` the model", "def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None,", "Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Use only", "domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path:", "== True: # replace True with list of all langs", "= False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains", "TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] =", "old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison", "Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text =", "await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, )", "skipped because no valid domain file was found. Only an", "= None, ) -> Optional[Text]: if loop is None: try:", "argument.\" # ) # return # if nlu_data.is_empty(): # print_warning(\"No", "file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text],", "changed. persist_nlu_training_data: `True` if the NLU training data should be", "model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"nlu-\", ) return", "and nlu_data.is_empty(): # print_error( # \"No training data given. Please", "_train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "section has been changed. A new model with \" \"the", "uncompress: If `True` the model will not be compressed. additional_arguments:", "FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from", "domain.is_empty() or stories.is_empty() nlu_untrainable = [l for l, d in", "codes training_files: Paths to the training data for Core and", "policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path", "= None, persist_nlu_training_data: bool = False, ): if not nlu_data:", "False, retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train", "# bf mod config = await file_importer.get_nlu_config(retrain_nlu) for lang in", "the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model", "the model to be stored. persist_nlu_training_data: `True` if the NLU", "stack: models = {} from rasa.nlu import config as cfg_loader", "stories are empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU", "_train_path def train_nlu( config: Text, nlu_data: Text, output: Text, train_path:", "nlu_data: print_error( \"No NLU data given. Please provide NLU data", "in which to train the model. output_path: Output path. 
force_training:", "be trained.\") # return await _train_core_with_validated_data( # file_importer, # output=output_path,", "# bf mod > if fingerprint_comparison.nlu == True: # replace", "config as cfg_loader if train_path: # If the train path", "fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, )", "path to the directory with the trained model files. \"\"\"", "for l in fingerprint_comparison.nlu if l not in nlu_untrainable] if", ") if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu,", "NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await rasa.nlu.train(", "output: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text]", "Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments:", "to be stored. uncompress: If `True` the model will not", "if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid NLU data", ") new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison =", "mod return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data,", "\"\"\" stories, nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) #", "fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) ->", "Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Rasa model", "domain file was found. Only an nlu-model was created.\" \"Please", "Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop() except", "data in order to train \" \"a Rasa NLU model", "the train path was provided, do nothing on exit. _train_path", "with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain", "asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): #", "color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name,", "path and clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))", "data present. Just a Rasa Core model will be trained.\")", "\" \"train a Rasa Core model using the '--stories' argument.\"", "are language codes training_files: Paths to the training data for", "was skipped because no valid domain file was found. \"", "change. \" \"Only the templates section has been changed. 
A", "TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color,", ") return None if not await file_importer.get_stories(): print_error( \"No stories", "data found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </", "# fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer)", "\"\"\"Trains a Core model. Args: domain: Path to the domain", "present. Just a Rasa Core model will be trained.\") #", "and NLU data in \" # \"order to train a", "\"Please specify a valid domain using '--domain' argument or check", "config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) ) async def", "the templates section has been changed. A new model with", "= await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if", "a temp train path and clean it up on exit.", "fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path,", "archive. \"\"\" # file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files", "`train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data. train_path:", "the domain file. config: Dict of paths to the config", "print_color(\"NLU data for language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE)", "BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain() # if", "in \" # \"order to train a Rasa model using", "Text, stories: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async", "fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path,", "async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] =", "Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult()", "fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) # bf mod", "_train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(", "using the '--data' argument.\" # ) # return # if", "await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core", "None, ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core and", "NLU model training will be skipped now.\" ) return return", "fingerprint_comparison.nlu == True: # replace True with list of all", "stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE)", "to train the model. output_path: Output path. 
force_training: If `True`", "train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists(", "with the model. Returns: If `train_path` is given it returns", "import os import tempfile from contextlib import ExitStack from typing", "additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) ->", "False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a", "data should be persisted with the model. fixed_model_name: Name of", "check if the provided domain file exists.\" ) return None", "return await _train_core_with_validated_data( # file_importer, # output=output_path, # fixed_model_name=fixed_model_name, #", "None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core():", "domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was", "ExitStack() as stack: if train_path: # If the train path", "await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", )", "training will be skipped now.\" ) return return await _train_nlu_with_validated_data(", "to the config file for Core. stories: Path to the", "the domain file. config: Path to the config file for", "async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training:", "# ) # return # if nlu_data.is_empty(): # print_warning(\"No NLU", "not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) #", "return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments,", "force_training: If `True` retrain model even if data has not", "Args: domain: Path to the domain file. config: Path to", "persisted with the model. fixed_model_name: Name of model to be", "Core training since domain or stories are empty.\", color=bcolors.OKBLUE) for", "output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can use", "file_importer: TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None,", "Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] =", "except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain,", "did not change. \" \"Only the templates section has been", "from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) #", "_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await file_importer.get_nlu_config(retrain_nlu)", "with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod", "path. force_training: If `True` retrain model even if data has", "print_warning(\"No NLU data present. 
Just a Rasa Core model will", ") # return # if nlu_data.is_empty(): # print_warning(\"No NLU data", "output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training was skipped because no", ") -> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU).", "data should be persisted with the model. Returns: If `train_path`", "file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files # ) with", "stories.is_empty() and nlu_data.is_empty(): # print_error( # \"No training data given.", "path. train_path: If `None` the model will be trained in", "additional_arguments: Additional training parameters. Returns: If `train_path` is given it", "from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning,", "additional_arguments: Additional training parameters. Returns: Path of the trained model", "return loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, )", "# \"No training data given. Please provide stories and NLU", "\"\"\"Train NLU with validated training and config data.\"\"\" import rasa.nlu.train", "False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments:", "file_importer, # output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint", "file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU", "use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return", "even if data has not changed. fixed_model_name: Name of model", "archive. \"\"\" stories, nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() )", "= train_path else: # Otherwise, create a temp train path", "import ExitStack from typing import Text, Optional, List, Union, Dict", "= None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False,", "and NLU. output_path: Output path. force_training: If `True` retrain model", "mod > if fingerprint_comparison.nlu == True: # replace True with", "rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import", "print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain, config = await asyncio.gather( file_importer.get_domain(),", "fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains", "stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await file_importer.get_nlu_config(retrain_nlu) for lang", "mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files)", "was created.\" \"Please specify a valid domain using '--domain' argument", "-> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Args:", "= False, retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]:", "False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None, )", "# \"order to train a Rasa model using the '--data'", "temporary directory, otherwise in the provided directory. 
fixed_model_name: Name of", "def train_core_async( domain: Union[Domain, Text], config: Text, stories: Text, output:", "output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer:", "Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] =", "= DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None,", "import config as cfg_loader if train_path: # If the train", "await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not", "parameters. Returns: Path of the trained model archive. \"\"\" stories,", "config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def", "= model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison =", "/bf mod return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name,", "persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not change. No", "file was found. \" \"Please specify a valid domain using", "file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]: print_color(\"Start training {}", "Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core with validated", "not core_untrainable fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if", "change. No need to retrain NLU model.\", color=bcolors.OKBLUE, ) def", "return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async", ") print_success( \"Nothing changed. You can use the old model", "retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train NLU", "\"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data, output,", "because no valid domain file was found. \" \"Please specify", "NLU model. Args: config: Path to the config file for", "None, ) -> Optional[Text]: if loop is None: try: loop", "not changed. persist_nlu_training_data: `True` if the NLU training data should", "in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain or", "config: Path to the config file for NLU. nlu_data: Path", "additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Rasa", "Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data:", "None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop =", "stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async(", "# ) with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) #", "print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _", "if the provided domain file exists.\" ) return None if", "print_error( \"No stories given. Please provide stories in order to", "bool = False, retrain_nlu: Union[bool, List[Text]] = True ) ->", "domain file. 
config: Path to the config file for Core.", "if not nlu_data: print_error( \"No NLU data given. Please provide", "config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training:", "training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool =", ") with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf", "def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training: bool,", "if fingerprint_comparison.nlu == True: # replace True with list of", "changed. You can use the old model stored at '{}'.\"", "to the config file for NLU. nlu_data: Path to the", "persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]] = True )", "= BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain() #", "_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core", "bool = False, ) -> Optional[Text]: \"\"\"Trains an NLU model.", "not change. No need to retrain NLU model.\", color=bcolors.OKBLUE, )", "need to retrain NLU model.\", color=bcolors.OKBLUE, ) def train_core( domain:", "nlu_data: Path to the NLU training data. output: Output path.", "provide stories in order to \" \"train a Rasa Core", "it. \" \"Please verify the data format. \" \"The NLU", "train_path) else: print_color( \"Core stories/configuration did not change. No need", "model.\", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text], config: Text,", "= None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train", "Name of the model to be stored. persist_nlu_training_data: `True` if", "config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH,", "train_nlu( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] =", "None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a", "\"Only the templates section has been changed. A new model", "Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model(", "in a temporary directory, otherwise in the provided directory. fixed_model_name:", "None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async(", "trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path,", "NLU). Args: domain: Path to the domain file. config: Dict", "fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ) ->", "the training data. train_path: Directory in which to train the", "(Core and NLU). 
Args: domain: Path to the domain file.", "training_files # ) with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))", "fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async( config: Text, nlu_data:", "output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict],", "Core model using the '--stories' argument.\" ) return return await", "print_color(\"No NLU data found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE)", "if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data,", "None, ) -> Optional[Text]: \"\"\"Trains a Core model. Args: domain:", "was found. \" \"Please specify a valid domain using '--domain'", "if nlu_data.is_empty(): # print_warning(\"No NLU data present. Just a Rasa", "file. config: Dict of paths to the config for Core", "Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] =", "Core and NLU. Keys are language codes training_files: Paths to", "Name of model to be stored. persist_nlu_training_data: `True` if the", "domain file exists.\" ) return nlu_model_only async def _train_async_internal( file_importer:", "color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core stories/configuration did", "print_warning, print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME", "Text], config: Text, stories: Text, output: Text, train_path: Optional[Text] =", "Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool =", "print_warning( \"Core training was skipped because no valid domain file", "valid NLU data in it. \" \"Please verify the data", "Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] =", "await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, )", "file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain = await", ") return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments,", "def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None,", "Optional[Text]: \"\"\"Trains an NLU model. Args: config: Path to the", "model to be stored. additional_arguments: Additional training parameters. 
Returns: Path", "only hence the training files still have to be selected", "persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain, Text],", "using the '--nlu' argument.\" ) return # training NLU only", "config: Dict of paths to the config for Core and", "model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training:", "file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def", ") await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core stories/configuration did not", "train_path ) # bf mod > if fingerprint_comparison.nlu == True:", "file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer,", "await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def", "fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu = [l for", "the model archive, otherwise the path to the directory with", "additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core with", "train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict]", "Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text", "print_error( \"Core training was skipped because no valid domain file", "training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE)", "[stories] ) domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core", "config[lang]: print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang],", "_do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] =", "model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def", "if loop is None: try: loop = asyncio.get_event_loop() except RuntimeError:", "import tempfile from contextlib import ExitStack from typing import Text,", "rasa.nlu.train with ExitStack() as stack: models = {} from rasa.nlu", "return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, )", "be trained in a temporary directory, otherwise in the provided", "Args: config: Path to the config file for NLU. nlu_data:", "file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text]", "TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain = await file_importer.get_domain() if", "Path to the Core training data. output: Output path. 
train_path:", "file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config,", "color=bcolors.OKBLUE) domain, config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await", "else: print_color( \"Core stories/configuration did not change. No need to", "train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain,", "and NLU. Keys are language codes training_files: Paths to the", "Rasa NLU model using the '--nlu' argument.\" ) return #", "print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def", "Path of the trained model archive. \"\"\" stories, nlu_data =", "data for language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) #", "None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, )", "should be persisted with the model. Returns: If `train_path` is", "from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config:", "config data.\"\"\" import rasa.nlu.train with ExitStack() as stack: models =", "provided, do nothing on exit. _train_path = train_path else: #", "= TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain = await file_importer.get_domain()", "order to \" \"train a Rasa Core model using the", "# additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer) old_model =", "asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name,", "file for Core. stories: Path to the Core training data.", "domain, config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train(", "to the model archive, otherwise the path to the directory", "NLU data in it. \" \"Please verify the data format.", ") ) async def train_async( domain: Union[Domain, Text], config: Dict[Text,", "await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was skipped because", "NLU only hence the training files still have to be", "async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files:", ") # bf mod > if fingerprint_comparison.nlu == True: #", "train_path: Directory in which to train the model. output_path: Output", "files still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config,", "asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training,", "training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool =", "-> Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop()", "Rasa model (Core and NLU). Args: domain: Path to the", "Path of the trained model archive. \"\"\" # file_importer =", "return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments,", "the config file for NLU. 
nlu_data: Path to the NLU", "new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name,", "_do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return", "Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool =", ") from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text,", "was skipped because no valid domain file was found. Only", "train path was provided, do nothing on exit. _train_path =", "all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain()", "not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain", "await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( #", "`train_path` is given it returns the path to the model", "return # if nlu_data.is_empty(): # print_warning(\"No NLU data present. Just", "await file_importer.get_stories(): print_error( \"No stories given. Please provide stories in", "Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] =", "up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare)", "<{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU", "nlu-model was created.\" \"Please specify a valid domain using '--domain'", "force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain:", "bool = False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] =", "file_importer: `TrainingDataImporter` which supplies the training data. train_path: Directory in", "training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await _do_training(", "data. output: Output path. train_path: If `None` the model will", ") -> Optional[Text]: \"\"\"Trains an NLU model. Args: config: Path", "if train_path is None: # Only Core was trained. new_fingerprint", "print_color( \"Core stories/configuration did not change. \" \"Only the templates", "def train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output:", "additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop = asyncio.get_event_loop()", "fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not", "specify a valid domain using '--domain' argument or check if", "returns the path to the model archive, otherwise the path", "if data has not changed. 
persist_nlu_training_data: `True` if the NLU", "return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed.", "d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and not", "-> Optional[Text]: \"\"\"Train Core with validated training and config data.\"\"\"", "\"\"\"Trains a Rasa model (Core and NLU). Args: domain: Path", ") ) async def train_core_async( domain: Union[Domain, Text], config: Text,", "will be skipped now.\" ) return return await _train_nlu_with_validated_data( file_importer,", "training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid NLU data in", "core_untrainable fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if l", "force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool", ") print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is None:", "NLU data given. Please provide NLU data in order to", "at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer:", "otherwise the path to the directory with the trained model", "contextlib import ExitStack from typing import Text, Optional, List, Union,", "\"NLU data/configuration did not change. No need to retrain NLU", "domain: Path to the domain file. config: Dict of paths", "model. Args: config: Path to the config file for NLU.", "file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model(", "output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text]", "train_core_async( domain: Union[Domain, Text], config: Text, stories: Text, output: Text,", "None: try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop()", "stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training(", "change. No need to retrain Core model.\", color=bcolors.OKBLUE, ) if", "with ExitStack() as stack: models = {} from rasa.nlu import", "to the training data for Core and NLU. output_path: Output", "= stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await file_importer.get_nlu_config(retrain_nlu) for", ") ) async def _train_nlu_async( config: Text, nlu_data: Text, output:", "data. train_path: Directory in which to train the model. output_path:", "None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop", "NLU. 
Keys are language codes training_files: Paths to the training", "file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): # print_error( #", "loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, )", "color=bcolors.OKBLUE) if train_path is None: # Only Core was trained.", "= [l for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core", "file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core", ") async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text],", "# return await _train_core_with_validated_data( # file_importer, # output=output_path, # fixed_model_name=fixed_model_name,", "output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name", "'{nlu_data}' doesn't contain valid NLU data in it. \" \"Please" ]
[ "if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i]) res.append(prices[-1]) return res", "in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break", "List[int]: res = [] for i in range(len(prices)): for j", "in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i]) res.append(prices[-1])", "j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i])", "-> List[int]: res = [] for i in range(len(prices)): for", "Solution: def finalPrices(self, prices: List[int]) -> List[int]: res = []", "class Solution: def finalPrices(self, prices: List[int]) -> List[int]: res =", "[] for i in range(len(prices)): for j in range(i+1,len(prices)): if", "prices: List[int]) -> List[int]: res = [] for i in", "range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if", "List[int]) -> List[int]: res = [] for i in range(len(prices)):", "range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i]) res.append(prices[-1]) return", "<gh_stars>1-10 class Solution: def finalPrices(self, prices: List[int]) -> List[int]: res", "finalPrices(self, prices: List[int]) -> List[int]: res = [] for i", "def finalPrices(self, prices: List[int]) -> List[int]: res = [] for", "i in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j])", "for i in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]:", "res = [] for i in range(len(prices)): for j in", "= [] for i in range(len(prices)): for j in range(i+1,len(prices)):", "for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1:" ]
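For readability, the overlapping n-grams in the row above appear to reconstruct to the following short Python solution (a LeetCode-style finalPrices: for each price, subtract the first later price that is less than or equal to it). This is a sketch assembled from those fragments, not part of the dataset row itself: the typing import and the comments are added here, the indentation is inferred, and the "<gh_stars>1-10" marker from the first fragment is omitted.

from typing import List  # added for the List[int] annotations used in the fragments

class Solution:
    def finalPrices(self, prices: List[int]) -> List[int]:
        res = []
        for i in range(len(prices)):
            for j in range(i + 1, len(prices)):
                if prices[j] <= prices[i]:
                    # First later price <= current price acts as the discount.
                    res.append(prices[i] - prices[j])
                    break
                if j == len(prices) - 1:
                    # Reached the end without finding a discount for prices[i].
                    res.append(prices[i])
        # The last price never has a later discount, so it is appended as-is.
        res.append(prices[-1])
        return res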
[ "self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if not", "self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore)", "= ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/'", "% ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) ->", "self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def", "self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') %", "vim: Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent] =", "def _do_immediately(self) -> None: goto = self._winid > 0 and", "self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col': 0,", "= len(sources) > 1 if not sources: # Ignore empty", "self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer =", "str: source_names = self._context['source_names'] if not self._is_multi or source_names ==", "in self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: #", "if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row =", "cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): #", "self._prev_wininfo: # Note: execute restcmd twice to restore layout properly", "return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not", "== 'vertical' else '' command = ( 'buffer' if split", "row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row,", "-1 if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange',", "\" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\" +", "= [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = ''", "winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config',", "or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated and is_changed:", "return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter", "key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After", "'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for", "not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do']", "max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts", "= candidates if command != '': self._vim.command(command) if is_quit and", "def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not self._floating else", "self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd", "= -1 self._winrestcmd = '' self._initialized = False self._winheight =", "else: self._update_status() 
self._context['is_redraw'] = False def quit(self) -> None: if", "self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit' if", "int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None: split = self._context['split']", "def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self, key:", "if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height =", "= not [x for x in self._displayed_texts if self._vim.call('strwidth', x)", "is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer:", "= False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile'] =", "= status self._prev_status = status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']:", "# Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for", "candidates_len > max_height: self._winheight = max_height elif candidates_len != self._winheight:", "-1 if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id =", "def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr:", "self._filter_floating = True elif split != 'no': command = self._get_direction()", "elif split != 'no': command = self._get_direction() command += '", "self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice to restore", "self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop':", "filter_col, }) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! zb')", "_check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1", "[ x['word'] for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable']", "/^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark", "= self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable']", "self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice", "# if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else:", "the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() ==", "context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! autocmd!", "self._vim.command('normal! 
zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize", "'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim)", "= self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window':", "self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor':", "'' self._ruler = False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any]", "for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False", "self._context['winwidth'] height = self._context['winheight'] if opened_pos + height + 3", "typing.List[int] = [] self._candidates: Candidates = [] self._cursor = 0", "= self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])])", "self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if", "winwidth, 'height': winheight, }) filter_row = 0 if wincol ==", "'aboveleft' if is_fit else 'topleft' else: direction = 'belowright' if", "in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text =", "'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call(", "resume = self._initialized and context['resume'] if resume: # Skip the", "'line_total': len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline'] =", "self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources", "empty sources. 
error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor()", "+ ' conceal contained') % ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context,", "self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number ==", "= self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources", "\"current.window.options\" changes global value instead of local in # neovim.", "in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' +", "prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern", "link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' +", "}) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos =", "self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif", "-> str: source_names = self._context['source_names'] if not self._is_multi or source_names", "bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]:", "or not action_name: return self._prev_action = action_name action = self._denite.get_action(", "Default(object): @property def is_async(self) -> bool: return self._is_async def __init__(self,", "_quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: #", "'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None:", "1 else row + winheight filter_col = self._context['wincol'] else: init_pos", "x['word'] for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] =", "_cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor", "restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height", "pos: int) -> Candidate: if not self._candidates or pos >", "UserContext) -> None: if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0],", "self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated", "previous window self._vim.command('wincmd p') def _do_command(self, command: str) -> None:", "and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if", "name) else name[:2]) source_name = short_name if source_names == 'short'", "+ 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement')", "x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<'", "self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'],", "if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', 
self._context['input']) return self._updated def", "'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite':", "status self._prev_status = status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if", "prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate", "self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0, candidates_len) ]", "context['path'] = self._context['path'] def _start(self, sources: typing.List[typing.Any], context: UserContext) ->", "'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit = not [x", "if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], })", "self._ruler = False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] =", "split: str) -> None: # Use floating window if split", "'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row':", "(str(self._context['selected_icon']) if index in self._selected_candidates else ' ') + '", "3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow']", "self._previous_text != self._context['input'])) if self._updated and is_changed: if not is_current_buffer:", "r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub(", "_get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if not", "= -1 self._winid = -1 self._winrestcmd = '' self._initialized =", "self._denite or not candidates or not action_name: return self._prev_action =", "return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ]", "-> str: direction = str(self._context['direction']) if direction == 'dynamictop' or", "'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else: direction", "redraw(self, is_force: bool = True) -> None: self._context['is_redraw'] = is_force", "< winminheight): self._winheight = winminheight elif candidates_len > max_height: self._winheight", "filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) >", "return winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical", "opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW'", "1) else: width = self._context['winwidth'] height = self._context['winheight'] if opened_pos", "= True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates]", "str(self._context['direction']) if direction == 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts()", "After timer_stop is called, self._timers may be removed if key", "'open': # Re-open denite buffer prev_cursor = self._cursor cursor_candidate =", "= False if not is_quit and is_manual: self._selected_candidates = []", "self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name':", "1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def", "else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if 
self._context['split'] != 'tab':", "self._result = [] return def _restart(self) -> None: self._context['input'] =", "'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k]", "{} self._vim.command('highlight! link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring']", "10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate =", "deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char'])", "init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col =", "self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row,", "x in self._selected_candidates] def _init_denite(self) -> None: if self._denite: self._denite.start(self._context)", "return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if", "'vertical' and not self._floating: # Move the window to bottom", "'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], })", "split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] ==", "denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if", "self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self,", "command != '': self._vim.command(command) if is_quit and post_action == 'open':", "== self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter", "self._bufnr) def _stop_timer(self, key: str) -> None: if key not", "False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id", "is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not", "self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0]", "if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note:", "typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id = -1", "self._floating: wincol = self._context['winrow'] row = wincol if split ==", "'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row':", "else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [", "== 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key ==", "import Nvim, UserContext, Candidates, Candidate from denite.parent import SyncParent class", "link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' +", "if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1", "if is_quit and post_action == 'open': # Re-open denite buffer", "self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif split != 'no':", "direction = 
str(self._context['direction']) if direction == 'dynamictop' or direction ==", "self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id in matches: self._vim.call('matchdelete',", "= self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1", "self._vim.call('bufadd', bufname) vertical = 'vertical' if split == 'vertical' else", "index: int) -> str: source_names = self._context['source_names'] candidate = self._candidates[index]", "'.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True", "{'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ', '')", "self._get_max_height()) if (winminheight != -1 and candidates_len < winminheight): self._winheight", "name: str) -> str: source_names = self._context['source_names'] if not self._is_multi", "'{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i) for", "floating window if split == 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'),", "self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id =", "colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal", "and post_action == 'open': # Re-open denite buffer prev_cursor =", "self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag", "-> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate:", "self._cursor = pos if self._context['reversed']: self._vim.command('normal! 
zb') def _move_to_next_line(self) ->", "from denite.parent import SyncParent class Default(object): @property def is_async(self) ->", "and self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches', self._winid)]", "self._previous_text = '' self._floating = False self._filter_floating = False self._updated", "split == 'vertical' else ' split' bufname = '[denite]-' +", "not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop", "self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default', command)", "return post_action = self._context['post_action'] is_quit = action['is_quit'] or post_action ==", "split == 'floating': if self._context['auto_resize'] and row > 1: row", "self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts =", "action_name: return self._prev_action = action_name action = self._denite.get_action( self._context, action_name,", "-= 1 def _move_to_first_line(self) -> None: self._cursor = 1 def", "= is_force if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status()", "options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted'] = False options['modeline']", "= self._context['split'] if (split == 'no' or split == 'tab'", "or self._statusline_sources != prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer')", "if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c'", "= self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" +", "height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative':", "is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False", "============================================================================ import re import typing from denite.util import echo, error,", "self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], }", "self._initialized = False self._winheight = 0 self._winwidth = 0 self._winminheight", "self._vim.call('winwidth', 0) is_fit = not [x for x in self._displayed_texts", "= -1 self._is_multi = False self._is_async = False self._matched_pattern =", "len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates')", "-> None: self._cursor = len(self._candidates) def _start_timer(self, key: str) ->", "[]): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) ->", "(winminheight != -1 and candidates_len < winminheight): self._winheight = winminheight", "def _switch_buffer(self) -> None: split = self._context['split'] if (split !=", "[x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches:", "prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr]", "== 'tab' or self._vim.call('winnr', '$') == 1): return winheight =", "if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in 
self._save_window_options.items():", "winwidth] if direction == 'dynamictop': direction = 'aboveleft' if is_fit", "#') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr',", "if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter')", "= candidates_len max_source_name_len = 0 if self._candidates: max_source_name_len = max([", "if not self._denite: return False [self._is_async, pattern, statuses, self._entire_len, self._candidates]", "[] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move']", "= min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len <", "if self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) ->", "self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] == 'no':", "+ inpt path = '[' + self._context['path'] + ']' status", "for x in self._selected_candidates] def _init_denite(self) -> None: if self._denite:", "self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else:", "command line window. return resume = self._initialized and context['resume'] if", "= self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid',", "if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status", "pattern prev_statusline_sources = self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async:", "if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is already", "def _stop_timer(self, key: str) -> None: if key not in", "%s]/' + ' conceal contained') % ( self._context['selected_icon'])) if self._denite:", "self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [", "'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd", "_do_command(self, command: str) -> None: self._init_cursor() cursor = 1 while", "self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer()", "-> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg')", "changes global value instead of local in # neovim. 
self._vim.command('setlocal", "!= '': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately'] or", "10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace('", "None: split = self._context['split'] if (split == 'no' or split", "-> None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']:", "self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name =", "Move to denite window self._vim.call('win_gotoid', self._winid) # Restore the window", "if self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']):", "Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if", "if not self._is_multi or source_names == 'hide': source_name = ''", "'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0)", "typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self)", "self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history:", "filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position',", "self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id =", "self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved", "( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) -> bool:", "position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo:", "self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor',", "height = self._context['winheight'] if opened_pos + height + 3 >", "'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s %s", "is_fit = not [x for x in self._displayed_texts if self._vim.call('strwidth',", "[prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()):", "} if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status", "self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$':", "regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates, Candidate from denite.parent", "self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight')", "self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding,", "( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor']", "= pos if self._context['reversed']: self._vim.command('normal! 
zb') def _move_to_next_line(self) -> None:", "if wincol == 1 else row + winheight filter_col =", "= self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid =", "if not sources: # Ignore empty sources. error(self._vim, 'Empty sources')", "statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern", "self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for", "= self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars", "= self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr", "action['is_quit'] or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name,", "status = { 'input': inpt, 'sources': self._statusline_sources, 'path': path, #", "= short_name if source_names == 'short' else name return source_name", "[] self._candidates: Candidates = [] self._cursor = 0 self._entire_len =", "Note: Close filter window before preview window self._vim.call('denite#filter#_close_filter_window') if not", "'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s %s %s", "self._vim.current.window.options[k] = v else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if", "# Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit", "and self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if split !=", "def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0:", "+ \"%{\" + linenr + \"}%*\") else: winnr = self._vim.call('win_id2win',", "inpt path = '[' + self._context['path'] + ']' status =", "self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def", "split = self._context['split'] if (split != 'no' and self._winid >", "' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) ->", "self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/',", "= self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi", "'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col = init_pos['col']", "= 0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context:", "= self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore", "{ 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number',", "the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move')", "else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options", "for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete", "= { 'input': inpt, 
'sources': self._statusline_sources, 'path': path, # Extra", "0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context: UserContext", "index in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '')", "self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) ->", "'[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname)", "== 'floating': if self._context['auto_resize'] and row > 1: row +=", "-> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite", "self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if", "self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context", "= max_height elif candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len", "self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link", "(self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer", "self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move', []): return if", "'silent keepalt %s %s %s %s' % ( self._get_direction(), vertical,", "'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line',", "= False self._filter_floating = False self._updated = False self._timers: typing.Dict[str,", "= action['is_quit'] or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context,", "def is_async(self) -> bool: return self._is_async def __init__(self, vim: Nvim)", "+ str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i) for i", "= [] self._previous_text = '' self._floating = False self._filter_floating =", "sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return", "= str(self._context['direction']) if direction == 'dynamictop' or direction == 'dynamicbottom':", "after set filetype option. 
self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']:", "= self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed", "self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status =", "self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = []", "'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif", "source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if", "== '$': self._move_to_last_line() def _do_immediately(self) -> None: goto = self._winid", "self._winid > 0 and self._vim.call( 'win_gotoid', self._winid) if goto: #", "- int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None: split =", "self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width': winwidth,", "-1 self._winrestcmd = '' self._initialized = False self._winheight = 0", "False self._is_async = False self._matched_pattern = '' self._displayed_texts: typing.List[str] =", "+ str(winheight)) if self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width", "to the previous window self._vim.command('wincmd p') def _do_command(self, command: str)", "0 self._winwidth = 0 self._winminheight = -1 self._is_multi = False", "= [] return def _restart(self) -> None: self._context['input'] = ''", "winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight filter_winid", "self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax()", "self._updated and is_changed: if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid',", "% ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + '", "typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if", "= [ self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def", "[] self._previous_text = '' self._floating = False self._filter_floating = False", "self._is_multi or source_names == 'hide': source_name = '' else: short_name", "command: str) -> None: self._init_cursor() cursor = 1 while cursor", "None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int)", "= -1 if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd',", "= '' return self._floating = split in [ 'floating', 'floating_relative_cursor',", "echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates,", "winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer", "from denite.ui.map import do_map self._vim.command('silent! autocmd! 
denite') if re.search(r'\\[Command Line\\]$',", "name return source_name def _get_candidate_display_text(self, index: int) -> str: source_names", "Candidate: if not self._candidates or pos > len(self._candidates): return {}", "%s' % ( self._get_direction(), vertical, command, bufnr, ) ) else:", "self._result: typing.List[typing.Any] = [] self._context: UserContext = {} self._bufnr =", "not self._denite: return False [self._is_async, pattern, statuses, self._entire_len, self._candidates] =", "_init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput", "execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim)", "self._context['input']: inpt = self._context['input'] + ' ' if self._context['error_messages']: inpt", "self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key]", "self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) <", "# neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn')", "bufname) vertical = 'vertical' if split == 'vertical' else ''", "None: inpt = '' if self._context['input']: inpt = self._context['input'] +", "'[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move", "range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr", "= '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd',", "if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x in", "not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] =", "'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] =", "if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts", "Disable quit flag is_quit = False if not is_quit and", "+ \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr + \"}%*\")) def", "opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row':", "False [self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts =", "bool = False) -> None: if is_manual: candidates = self._get_selected_candidates()", "= ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim) self._result = []", "self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer:", "self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes')", "= 'NW' row = 1 self._context['filter_winrow'] = row + height", "to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not", "self._displayed_texts = [] 
self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo", "'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options", "self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None:", "# In Vim8, FileType autocmd is not fired after set", "def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async", "self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height != winheight: if", "-> None: self._vim = vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates:", "' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber", "== 'short' else name return source_name def _get_candidate_display_text(self, index: int)", "' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:'", "def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate() ]", "if key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note:", "not [x for x in self._displayed_texts if self._vim.call('strwidth', x) >", "{ 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height':", "key: str) -> None: if key in self._timers: return if", "= self._vim.call('bufadd', bufname) vertical = 'vertical' if split == 'vertical'", "-> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def", "winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos", "candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}]", "height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win',", "self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable')", "bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr =", "is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command", "= {} self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete =", "self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = []", "clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos:", "= (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name", "']' status = { 'input': inpt, 'sources': self._statusline_sources, 'path': path,", "window self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] ==", "else name[:2]) source_name = short_name if source_names == 'short' else", "None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number", "self._candidates: Candidates = [] self._cursor = 0 self._entire_len = 0", "== 
self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit", "!= -1 and candidates_len < winminheight): self._winheight = winminheight elif", "is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input']", "'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if", "split = self._context['split'] if (split == 'no' or split ==", "self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id", "' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split =", "'$': self._move_to_last_line() def _do_immediately(self) -> None: goto = self._winid >", "'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in", "options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite' if", "None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer()", "winwidth = self._vim.call('winwidth', 0) is_fit = not [x for x", "-1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self,", "else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) ->", "self._winid)): if split != 'vertical' and not self._floating: # Move", "pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if", "!= winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer:", "if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal", "buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid',", "'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] #", "self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None:", "'vertical' else '' command = ( 'buffer' if split in", "self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern =", "'row': row, 'col': self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row", "action: return post_action = self._context['post_action'] is_quit = action['is_quit'] or post_action", "elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' +", "else: candidates = [] if not self._denite or not candidates", "( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" +", "self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): # In", "( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i)", "split == 'tab' or self._vim.call('winnr', '$') == 1): return winheight", "update: self._context[key] = context[key] self._check_move_option() if 
self._check_do_option(): return self._init_buffer() if", "dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos')", "if goto: # Jump to denite window self._init_buffer() self.do_action('default') candidate", "= False) -> None: if is_manual: candidates = self._get_selected_candidates() elif", "elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating = True", "not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if", "'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.')", "0 self._result: typing.List[typing.Any] = [] self._context: UserContext = {} self._bufnr", "self._start_sources_queue(context) return self._result def do_action(self, action_name: str, command: str =", "{} self._bufnr = -1 self._winid = -1 self._winrestcmd = ''", "('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in update:", "x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text", "self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor", "denite.ui.map import do_map self._vim.command('silent! autocmd! denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name):", "or direction == 'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit", "self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called, self._timers may", "'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height':", "self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')}", "Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf',", "for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete',", "pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos", "if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] =", "= 1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer()", "1) winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical'", "self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated", "+ self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default", "self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW' row", "= False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command", "status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status", "self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear() self._context.update(context) self._context['sources'] =", "if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear 
previewed buffers for bufnr", "self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \"", "= '' if self._context['input']: inpt = self._context['input'] + ' '", "'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'],", "self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and", "winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical =", "self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars =", "self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width':", "False if not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw'])", "(self._displayed_texts != prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources !=", "0 if wincol == 1 else row + winheight filter_col", "'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height': winheight,", "None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal!", "'NW' row = 1 self._context['filter_winrow'] = row + height +", "-> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'): #", "def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor", "UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'):", "- int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None:", "width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window':", "self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor)", "elif candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len = 0", "window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal", "= self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr", "in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id", "!= self._context['input'])) if self._updated and is_changed: if not is_current_buffer: save_winid", "-= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col':", "' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore')", "self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if (split", "self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link '", "0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col,", "re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif 
re.match(r'-\\d+', self._context['cursor_pos']):", "the previous window self._vim.command('wincmd p') def _do_command(self, command: str) ->", "self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent", "= self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid',", "self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self) ->", "{ 'relative': 'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize '", "+ self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark')", "# Note: Close filter window before preview window self._vim.call('denite#filter#_close_filter_window') if", "'': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately'] or len(self._candidates)", "deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link", "line window. return resume = self._initialized and context['resume'] if resume:", "self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ', '') ))", "if split == 'floating': if self._context['auto_resize'] and row > 1:", "prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates']", "self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating", "' + inpt path = '[' + self._context['path'] + ']'", "autocmd is not fired after set filetype option. 
self._vim.command('silent doautocmd", "= False self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources", "if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler", "in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if", "'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position',", "= '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring", "False self._is_multi = len(sources) > 1 if not sources: #", "context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result def do_action(self, action_name: str,", "'col': self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row = 0", "the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False", "self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr =", "'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if self._vim.current.buffer.options['filetype'] !=", "default.py # AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT", "-> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms =", "ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim) self._result = [] context['sources_queue']", "None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer", "self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k]", "winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move", "None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line',", "= None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates = []", "self._context['error_messages']: inpt = '[ERROR] ' + inpt path = '['", "self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos)", "[self._get_cursor_candidate()] else: candidates = [] if not self._denite or not", "self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word'] for x", "_move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor =", "= init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid)", "self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input']))", "typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates =", "self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! 
link CursorLine CursorLine') if self._floating or", "self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if", "-> None: if key not in self._timers: return self._vim.call('timer_stop', self._timers[key])", "= SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context) return", "fired after set filetype option. self._vim.command('silent doautocmd FileType denite') if", "elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline')", "from denite.util import Nvim, UserContext, Candidates, Candidate from denite.parent import", "candidates = [] if not self._denite or not candidates or", "self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars", "'nofile' options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted'] = False", "= self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$') ==", "len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self) ->", "'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth,", "= self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight filter_winid =", "if (split != 'no' and self._winid > 0 and self._vim.call('win_gotoid',", "if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd", "to use setlocal instead of \"current.window.options\" # \"current.window.options\" changes global", "self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth =", "self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True) -> None:", "not fired after set filetype option. 
self._vim.command('silent doautocmd FileType denite')", "self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None:", "self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar '", "inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total':", "= ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in", "and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async", "True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'],", "'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize ' + str(winheight))", "-> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate() ] if self._get_cursor_candidate()", "self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None:", "= '', is_manual: bool = False) -> None: if is_manual:", "or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates)", "+ ' '.join(terms).replace('\\n', '') def _get_max_height(self) -> int: return int(self._vim.options['lines'])", "None: if self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self)", "or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \"", "self._winid) filter_row = winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow']", "if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): #", "'') )) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1,", "denite window self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split']", "' ') + ' '.join(terms).replace('\\n', '') def _get_max_height(self) -> int:", "[] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']:", ") ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def", "0 self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW'", "self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern,", "- 1) if self._context['auto_resize']: height = max(self._winheight, 1) width =", "is_current_buffer: bool) -> None: split = self._context['split'] if (split ==", "source_names = self._context['source_names'] if not self._is_multi or source_names == 'hide':", "if direction == 'dynamictop': direction = 'aboveleft' if is_fit else", "False self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources =", "self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler", "0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in", "return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): 
return self._init_buffer()", "is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if", "'' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name)", ") self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % (", "self._winheight = winminheight elif candidates_len > max_height: self._winheight = max_height", "if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop': direction", "self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid,", "= False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1", "'relative': 'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize ' +", "None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates = [] self._cursor", "self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def", "in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use", "+ linenr + \"}%*\") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar',", "'&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\"", "(self._candidates and self._context['immediately'] or len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately()", "if index in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n',", "if goto: # Move to the previous window self._vim.command('wincmd p')", "self._vim.command('pclose!') # Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if", "post_action == 'open': # Re-open denite buffer prev_cursor = self._cursor", "{} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = {", "!= '': self._vim.command(command) if is_quit and post_action == 'open': #", "None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and", "signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options =", "def _start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map", "= self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text", "self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']:", "deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match", "- 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return", "self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo()", "self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for", "self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1) width", "do_map(self, 
'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict()", "'': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10,", "if split == 'vertical' else '' command = ( 'buffer'", "] def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number", "not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'],", "if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction']", "candidates if command != '': self._vim.command(command) if is_quit and post_action", "!= prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if self._context['search']", "== 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid)", "self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if", "!= prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources)", "'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates),", "import typing from denite.util import echo, error, clearmatch, regex_convert_py_vim from", "= self._context['split'] if (split != 'no' and self._winid > 0", "= max(self._winwidth, 1) is_vertical = split == 'vertical' if not", "candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len = 0 if", "key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key", "if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not", "split == 'vertical' else '' command = ( 'buffer' if", "matches = [x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id", "[] return [self._candidates[x] for x in self._selected_candidates] def _init_denite(self) ->", "False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt = '' if", ") else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self)", "before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear", "hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite =", "'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in update: self._context[key] =", "-1 self._is_multi = False self._is_async = False self._matched_pattern = ''", "context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor)", "self._floating or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')}", "'') def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not self._floating", "# Disable quit flag is_quit = False if not is_quit", "and self._previous_text != self._context['input'])) if self._updated and is_changed: if not", "1: row += 
self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, {", "self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) ->", "not is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol =", "= 'SW' row = 0 self._context['filter_winrow'] = row + opened_pos", "split != 'vertical' and not self._floating: # Move the window", "< len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None:", "'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], })", "source_names == 'short' else name return source_name def _get_candidate_display_text(self, index:", "( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool)", "self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do'] != '':", "elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, {", "# Ignore command line window. return resume = self._initialized and", "0: # Denite buffer is already closed return winids =", "if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def", "self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close", "{ 'input': inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name':", "None: from denite.ui.map import do_map self._vim.command('silent! autocmd! denite') if re.search(r'\\[Command", "1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup()", "encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates", "nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if", "== 'vertical' else ' split' bufname = '[denite]-' + self._context['buffer_name']", "!= 'vertical' and not self._floating: # Move the window to", "self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor',", "and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr", "= (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height", "= len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight']", "!= context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False", "' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts", "def _init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized =", "winminheight): self._winheight = winminheight elif candidates_len > 
max_height: self._winheight =", "( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' +", "_resize_buffer(self, is_current_buffer: bool) -> None: split = self._context['split'] if (split", "else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win',", "window_options = { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable',", "= self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']:", "if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical = 'vertical'", "self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) -> bool: if not self._denite: return", "in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self)", "= {} self._save_window_options = {} window_options = { 'colorcolumn', 'concealcursor',", "self._winid) if goto: # Jump to denite window self._init_buffer() self.do_action('default')", "not candidates or not action_name: return self._prev_action = action_name action", "if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")')", "bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates", "= [] if self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name'])))", "self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if", "default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] =", "deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath", "import do_map self._vim.command('silent! autocmd! 
denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): #", "self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if not context['sources_queue']:", "self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon']))", "== 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None:", "Have to use setlocal instead of \"current.window.options\" # \"current.window.options\" changes", "= [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo =", "0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and not", "Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status:", "self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates:", "False options['modifiable'] = False options['filetype'] = 'denite' if self._vim.call('exists', '#WinEnter'):", "None: # Use floating window if split == 'floating': self._vim.call(", "self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos", "bool = True) -> None: self._context['is_redraw'] = is_force if is_force:", "self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if", "CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler']", "action_name, candidates) if not action: return post_action = self._context['post_action'] is_quit", "def _move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor -=", "self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not", "if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window()", "self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext)", "self._statusline_sources = '' self._titlestring = '' self._ruler = False self._prev_action", "Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k,", "'topleft' else: direction = 'belowright' if is_fit else 'botright' return", "'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern =", "self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self,", "'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) -> None: if key", "context['sources_queue'].pop(0) context['path'] = self._context['path'] def _start(self, sources: typing.List[typing.Any], context: UserContext)", "= context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw()", "self._vim.current.buffer.name): # Ignore command line window. 
return resume = self._initialized", "self._winheight = candidates_len max_source_name_len = 0 if self._candidates: max_source_name_len =", "self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable", "'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col':", "None: self._vim = vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int]", "return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) ->", "self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed =", "'' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context:", "'' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window',", "\"current.window.options\" # \"current.window.options\" changes global value instead of local in", "not sources: # Ignore empty sources. error(self._vim, 'Empty sources') return", "self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) -> None:", "self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore", "return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) ->", "if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite", "linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring']", "{ 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] = {}", "self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str:", "str, command: str = '', is_manual: bool = False) ->", "candidates) if not action: return post_action = self._context['post_action'] is_quit =", "'' command = ( 'buffer' if split in ['no', 'tab',", "len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move to the previous", "self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine", "winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto')", "(split == 'no' or split == 'tab' or self._vim.call('winnr', '$')", "max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len", "in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if self._matched_pattern", "Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent] = None", "self._denite: self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) -> bool: if not self._denite:", "'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] = self._vim.call( 
'denite#helper#_start_update_buffer_timer',", "!= self._winheight: self._winheight = candidates_len max_source_name_len = 0 if self._candidates:", "return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called, self._timers", "= True elif split != 'no': command = self._get_direction() command", "self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for", "'height': winheight, }) filter_row = 0 if wincol == 1", "candidates = [self._get_cursor_candidate()] else: candidates = [] if not self._denite", "pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts()", "is_manual: bool = False) -> None: if is_manual: candidates =", "< 0: # Denite buffer is already closed return winids", "self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding']", "+ 3 > self._vim.options['lines']: anchor = 'SW' row = 0", "}) filter_row = 0 if wincol == 1 else row", "sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if", "self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> None:", "= 0 self._winminheight = -1 self._is_multi = False self._is_async =", "self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default", "None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!')", "neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal", "self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number", "is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and", "import SyncParent class Default(object): @property def is_async(self) -> bool: return", "self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and not self._floating: #", "is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else:", "value instead of local in # neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal", "self._updated = False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt =", "if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window. 
return", "'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) #", "if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" +", "self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split", "self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') %", "1) is_vertical = split == 'vertical' if not is_current_buffer: restore", "1): return winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1)", "-> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos:", "+ 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' +", "'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer':", "_split_floating(self, split: str) -> None: # Use floating window if", "def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist')", "self._denite = SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context)", "SyncParent class Default(object): @property def is_async(self) -> bool: return self._is_async", "matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if self._matched_pattern !=", "+ \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr", "contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' +", "self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if", "if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = ''", "if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self)", "self._move_to_last_line() def _do_immediately(self) -> None: goto = self._winid > 0", "# \"current.window.options\" changes global value instead of local in #", "= max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}')", "True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates] buffer[:]", "if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif", "self._denite.get_action( self._context, action_name, candidates) if not action: return post_action =", "== 1 else row + winheight filter_col = self._context['wincol'] else:", "split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False", "buffer is already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if", "r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd',", "\"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\"", 
"'prev_winid', 'start_filter', 'quick_move') for key in update: self._context[key] = context[key]", "_close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #')", "closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit", "Candidate from denite.parent import SyncParent class Default(object): @property def is_async(self)", "= self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd =", "False options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite'", "self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '') def _get_max_height(self)", "def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor +=", "self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal", "== 'floating': self._filter_floating = True elif split != 'no': command", "'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates", "= '' self._floating = False self._filter_floating = False self._updated =", "== 1): return winheight = max(self._winheight, 1) winwidth = max(self._winwidth,", "if not is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol", "cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable", "bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname)", "= max(self._winwidth, 1) else: width = self._context['winwidth'] height = self._context['winheight']", "return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move',", "'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap',", "self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self) ->", "self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline']", "self._context['reversed']: self._vim.command('normal! 
zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical", "self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the", "self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi = len(sources) >", "k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split']", "< len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> None: if", "self._cursor += 1 def _move_to_prev_line(self) -> None: if self._cursor >=", "self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor() cursor", "if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates),", "typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any]", "buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent", "-1 and candidates_len < winminheight): self._winheight = winminheight elif candidates_len", "None: split = self._context['split'] if (split != 'no' and self._winid", "vertical = 'vertical' if split == 'vertical' else '' command", "self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') #", "removed if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str)", "def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context)", "timer_stop is called, self._timers may be removed if key in", "_start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import", "self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight']", "None: if key in self._timers: return if key == 'update_candidates':", "self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position", "'.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note:", "split == 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative':", "self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async or", "self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate !=", "winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid)", "self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer')", "(re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name =", "= 0 self._result: typing.List[typing.Any] = [] self._context: UserContext = {}", "max(self._winheight, 1) width = max(self._winwidth, 1) else: width = self._context['winwidth']", "self._context['quick_move'] and do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer',", "> 0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and", "= 
row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, {", "len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] =", "= self._denite.get_action( self._context, action_name, candidates) if not action: return post_action", "Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] =", "of \"current.window.options\" # \"current.window.options\" changes global value instead of local", "def do_action(self, action_name: str, command: str = '', is_manual: bool", "self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating =", "= True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input'])", "self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def", "self._cursor = len(self._candidates) def _start_timer(self, key: str) -> None: if", "else 'sbuffer') self._vim.command( 'silent keepalt %s %s %s %s' %", "str) -> None: # Use floating window if split ==", "'', is_manual: bool = False) -> None: if is_manual: candidates", "+ \"}%*\")) def _get_display_source_name(self, name: str) -> str: source_names =", "SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result", "self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height())", "status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if self._floating or self._filter_floating:", "self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']:", "x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id,", "typing from denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util", "self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' +", "'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else", "self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \"", "match deniteConcealedMark /^[ %s]/' + ' conceal contained') % (", "_stop_timer(self, key: str) -> None: if key not in self._timers:", "properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor)", "self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0')", "int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos if", "= {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options =", "= self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources 
self._statusline_sources =", "if self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self) ->", "self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] =", "'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber',", "'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn',", "self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal instead", "self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col':", "if self._context['auto_resize']: height = max(self._winheight, 1) width = max(self._winwidth, 1)", "len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True return not", "return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'],", "' conceal contained') % ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi)", "= False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt = ''", "= self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'],", "if source_names == 'short' else name return source_name def _get_candidate_display_text(self,", "'floating': if self._context['auto_resize'] and row > 1: row += self._context['winheight']", "for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line()", "self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line()", "[self._candidates[x] for x in self._selected_candidates] def _init_denite(self) -> None: if", "] if self._get_cursor_candidate() else [] return [self._candidates[x] for x in", "if not self._selected_candidates: return [self._get_cursor_candidate() ] if self._get_cursor_candidate() else []", "'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif", "= False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {}", "= { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list',", "# FILE: default.py # AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> #", "ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar", "'no' or split == 'tab' or self._vim.call('winnr', '$') == 1):", "+ \" %{denite#get_status('path')}%*\" + \"%{\" + linenr + \"}%*\") else:", "self._timers[key]) # Note: After timer_stop is called, self._timers may be", "'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split ==", "self._stop_timer('update_buffer') def _update_status(self) -> None: inpt = '' if self._context['input']:", "self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates] buffer[:] =", "= self._context['post_action'] is_quit = action['is_quit'] or post_action == 'quit' if", "options['buflisted'] = 
False options['modeline'] = False options['modifiable'] = False options['filetype']", "self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer()", "init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0]", "= [self._get_cursor_candidate()] else: candidates = [] if not self._denite or", "errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else '", "= self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] =", "self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x", "# License: MIT license # ============================================================================ import re import typing", "_move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor -= 1", "self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor,", "None: self._cursor = len(self._candidates) def _start_timer(self, key: str) -> None:", "Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = ''", "= self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else:", "self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None:", "vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt", "post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result", "+ ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[", "self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\"", "'sbuffer') self._vim.command( 'silent keepalt %s %s %s %s' % (", "filetype option. 
self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite", "self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has',", "-> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor =", "if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: #", "self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline']", "prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources", "# ============================================================================ # FILE: default.py # AUTHOR: <NAME> <<EMAIL> at", "def _do_command(self, command: str) -> None: self._init_cursor() cursor = 1", "else 'topleft' else: direction = 'belowright' if is_fit else 'botright'", "__init__(self, vim: Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent]", "False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command =", "typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text = ''", "if command != '': self._vim.command(command) if is_quit and post_action ==", "re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names ==", "self._titlestring = '' self._ruler = False self._prev_action = '' self._prev_status:", "self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim,", "if self._floating: wincol = self._context['winrow'] row = wincol if split", "Line\\]$', self._vim.current.buffer.name): # Ignore command line window. 
return resume =", "Denite buffer is already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr'])", "matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate", "== 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1',", "self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth", "False self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id", "'.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts !=", "return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if", "self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif split", "[] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result def do_action(self, action_name:", "row + winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config',", "self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr", "True, { 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'],", "= self._get_direction() command += ' vsplit' if split == 'vertical'", "window before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') #", "self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move', []): return if self._context['start_filter']:", "if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path'] def _start(self, sources: typing.List[typing.Any],", "'input': inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'],", "self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _", "'no': command = self._get_direction() command += ' vsplit' if split", "= '' self._titlestring = '' self._ruler = False self._prev_action =", "bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete '", "prev_statusline_sources = self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates')", "' + str(winheight)) if self._context['reversed']: self._vim.command('normal!
zb') elif is_vertical and", "= False options['modeline'] = False options['modifiable'] = False options['filetype'] =", "_start_sources_queue(self, context: UserContext) -> None: if not context['sources_queue']: return self._sources_history.append({", "and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) ->", "if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) ->", "self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid)", "= self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None: if", "'col': 0, 'width': width, 'height': height, 'anchor': anchor, }) elif", "' split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'):", "elif key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def", "self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1,", "str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool:", "is_async(self) -> bool: return self._is_async def __init__(self, vim: Nvim) ->", "winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical' if", "= ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window',", "self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {}", "= candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if", "self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if", "= '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self,", "v else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] !=", "True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return", "is called, self._timers may be removed if key in self._timers:", "= [] self._context: UserContext = {} self._bufnr = -1 self._winid", "max_height elif candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len =", "_restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer()", "== 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1)", "if split == 'vertical' else ' split' bufname = '[denite]-'", "-> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif", "split == 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid',", "set filetype option.
self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd", "if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates])", "def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def", "self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self)", "= self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth", "'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + '", "errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else", "'/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len =", "str) -> None: self._init_cursor() cursor = 1 while cursor <", "{} self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call(", "self._candidates or pos > len(self._candidates): return {} return self._candidates[pos -", "= [] self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any]", "self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def", "class Default(object): @property def is_async(self) -> bool: return self._is_async def", "+ self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1)", "self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: #", "self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight", "= (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated", "_get_candidate(self, pos: int) -> Candidate: if not self._candidates or pos", "'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split']", "is_quit = False if not is_quit and is_manual: self._selected_candidates =", "True) -> None: self._context['is_redraw'] = is_force if is_force: self._gather_candidates() if", "= self._initialized and context['resume'] if resume: # Skip the initialization", "resume: # Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos',", "p') def _do_command(self, command: str) -> None: self._init_cursor() cursor =", "= 'nofile' options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted'] =", "to denite window self._vim.call('win_gotoid', self._winid) # Restore the window if", "in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split'] == 'tab':", "cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False if", "= 'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'):", "= dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos =", "keepalt {command}', bufname) def _get_direction(self) -> str:
direction = str(self._context['direction'])", "== 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result =", "self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text", "self._vim.command( 'silent keepalt %s %s %s %s' % ( self._get_direction(),", "'height': winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW':", "False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer", "+ linenr + \"}%*\")) def _get_display_source_name(self, name: str) -> str:", "-> None: self._init_cursor() cursor = 1 while cursor < len(self._candidates):", "= False self._is_multi = len(sources) > 1 if not sources:", "else: width = self._context['winwidth'] height = self._context['winheight'] if opened_pos +", "self._context['post_action'] is_quit = action['is_quit'] or post_action == 'quit' if is_quit:", "else '' command = ( 'buffer' if split in ['no',", "in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' +", "self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] =", "<<EMAIL> at g<EMAIL>> # License: MIT license # ============================================================================ import", "self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] =", "buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word'] for x in", "self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete'))", "'.join(terms).replace('\\n', '') def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not", "[ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None:", "denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window.", "'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str)", "if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() #", "# Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status ==", "'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option():", "[] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] =", "= self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] =", "= { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] =", "command = 'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating:", "int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None: split", "self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable')
self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted'] = False options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is not fired after set filetype option. self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if (split != 'no' and self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and not self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif split != 'no': command = self._get_direction() command += ' vsplit' if split == 'vertical' else ' split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical = 'vertical' if split == 'vertical' else '' command = ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s %s %s %s' % (
self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str: direction = str(self._context['direction']) if direction == 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit = not [x for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else: direction = 'belowright' if is_fit else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal instead of "current.window.options" # "current.window.options" changes global value instead of local in # neovim.
return resume = self._initialized and context['resume'] if resume: # Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if not sources: # Ignore empty sources. error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated and is_changed: if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt = '' if self._context['input']: inpt = self._context['input'] + ' ' if self._context['error_messages']: inpt = '[ERROR] ' + inpt path = '[' + self._context['path'] + ']' status = { 'input': inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status linenr = "printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))"
if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( "%{denite#get_status('input')}%* " + "%{denite#get_status('sources')} " + " %{denite#get_status('path')}%*" + "%{" + linenr + "}%*") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( "%#deniteInput#%{denite#get_status('input')}%* " + "%{denite#get_status('sources')} %=" + "%#deniteStatusLinePath# %{denite#get_status('path')}%*" + "%#deniteStatusLineNumber#%{" + linenr + "}%*")) def _get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names == 'short' else name return source_name def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '') def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None: split = self._context['split'] if (split == 'no' or split == 'tab' or self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol = self._context['winrow'] row = wincol if split == 'floating': if self._context['auto_resize'] and row > 1: row += self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row = 0 if wincol == 1 else row + winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately'] or len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None: goto = self._winid > 0 and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move to the previous window self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter window before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if not self._candidates or pos > len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate() ] if self._get_cursor_candidate() else [] return [self._candidates[x] for x in self._selected_candidates] def _init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self, key: str) -> None: if key in self._timers: return if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) -> None: if key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called, self._timers may be removed if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: # Use floating window if split == 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1) width = max(self._winwidth, 1) else: width = self._context['winwidth'] height = self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row + height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width': width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) def _update_candidates(self) -> bool: if not self._denite: return False [self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len < winminheight): self._winheight = winminheight elif candidates_len > max_height: self._winheight = max_height elif candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len = 0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._vim.command('silent! autocmd!
denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window. return resume = self._initialized and context['resume'] if resume: # Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) def quit(self) -> None: if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path'] def _start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! autocmd! denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window. return resume = self._initialized and context['resume'] flag is_quit = False if not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True) -> None: self._context['is_redraw'] = is_force if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self) -> None: if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def do_action(self, action_name: str, command: str = '', is_manual: bool = False) -> None: if is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if not self._denite or not candidates or not action_name: return self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if not action: return post_action = self._context['post_action'] is_quit = action['is_quit'] or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command != '': self._vim.command(command) if is_quit and post_action == 'open': # Re-open denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False if not is_quit and is_manual: self._selected_candidates = [] def __init__(self, vim: Nvim) -> None: self._vim = vim self._denite:
typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates = [] self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context: UserContext = {} self._bufnr = -1 self._winid = -1 self._winrestcmd = '' self._initialized = False self._winheight = 0 self._winwidth = 0 self._winminheight = -1 self._is_multi = False self._is_async = False self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring = '' self._ruler = False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating = False self._filter_floating = False self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result def do_action(self, action_name: str, command: str = '', is_manual: bool = False) -> None: if is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if not self._denite or not candidates or not action_name: return self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if not action: return post_action = self._context['post_action'] is_quit = action['is_quit'] or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command != '': self._vim.command(command) if is_quit and post_action == 'open': # Re-open denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False if not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines'])
-", "context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim,", "self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text !=", "return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context !=", "-> int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines'])", "{}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move to", "sources. error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option()", "'' self._initialized = False self._winheight = 0 self._winwidth = 0", "max(self._winwidth, 1) is_vertical = split == 'vertical' if not is_current_buffer:", "self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None:", "-> None: split = self._context['split'] if (split == 'no' or", "self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid)", "if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self)", "or pos > len(self._candidates): return {} return self._candidates[pos - 1]", "or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self)", "True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width': width,", "winheight, }) filter_row = 0 if wincol == 1 else", "%s %s %s' % ( self._get_direction(), vertical, command, bufnr, )", "self._is_async def __init__(self, vim: Nvim) -> None: self._vim = vim", "None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight =", "self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos =", "filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid,", "str(winheight)) if self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width !=", "self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path'] def _start(self,", "+ self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight", "-> None: if key in self._timers: return if key ==", "winminheight elif candidates_len > max_height: self._winheight = max_height elif candidates_len", "g<EMAIL>> # License: MIT license # ============================================================================ import re import", "sources: # Ignore empty sources. 
error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal instead of "current.window.options" # because it changes the global value instead of local in # neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted'] = False options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is not fired after set filetype option.
self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map("auto_action")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if (split != 'no' and self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and not self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif split != 'no': command = self._get_direction() command += ' vsplit' if split == 'vertical' else ' split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical = 'vertical' if split == 'vertical' else '' command = ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s %s %s %s' % ( self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str: direction = str(self._context['direction']) if direction == 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit = not [x for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else: direction = 'belowright' if is_fit else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) if self._context['reversed']: self._vim.command('normal!
zb') def _move_to_next_line(self) -> None: if self._cursor <", "'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'],", "self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for", "or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr))", "not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for bufnr in", "= 'vertical' if split == 'vertical' else '' command =", "terms = [] if self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format(", "'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight", "in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])):", "updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg',", "already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: #", "'' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool", "= self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates =", "self._floating = False self._filter_floating = False self._updated = False self._timers:", "Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default", "not action_name: return self._prev_action = action_name action = self._denite.get_action( self._context,", "+ ']' status = { 'input': inpt, 'sources': self._statusline_sources, 'path':", "'width': winwidth, 'height': winheight, }) filter_row = 0 if wincol", "-> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete", "self._context['input'])) if self._updated and is_changed: if not is_current_buffer: save_winid =", "None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) ->", "0 and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump to", "' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine", "self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any] = []", "-> None: self._context['is_redraw'] = is_force if is_force: self._gather_candidates() if self._update_candidates():", "else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts or self._matched_pattern !=", "elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None: goto", "'col': filter_col, }) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal!", "winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath#", "filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row,", "not self._floating: # Move the window to bottom self._vim.command('wincmd J')", "direction =
'belowright' if is_fit else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) self._winminheight = -1 self._is_multi = False self._is_async = False self._matched_pattern", "# ============================================================================ # FILE: default.py # AUTHOR: <NAME> <<EMAIL>", "= -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight!
link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if not self._candidates or pos > len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate() ] if self._get_cursor_candidate() else [] return [self._candidates[x] for x in self._selected_candidates] def _init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! zb') def _split_floating(self, split: str) -> None: # Use floating window if split ==
'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1) width = max(self._winwidth, 1) else: width = self._context['winwidth'] height = self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row + height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width': width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( "%{denite#get_status('input')}%* " + "%{denite#get_status('sources')} " + " %{denite#get_status('path')}%*" + "%{" + linenr + "}%*") else: winnr = self._vim.call('win_id2win', self._winid)
self._vim.call('setwinvar', winnr, '&statusline', ( "%#deniteInput#%{denite#get_status('input')}%* " + "%{denite#get_status('sources')} %=" + "%#deniteStatusLinePath# %{denite#get_status('path')}%*" + "%#deniteStatusLineNumber#%{" + linenr + "}%*")) def _get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names == 'short' else name return source_name def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '') def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])):
self._move_to_prev_line()", "context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path']", "license # ============================================================================ import re import typing from denite.util import", "'' self._titlestring = '' self._ruler = False self._prev_action = ''", "elif candidates_len > max_height: self._winheight = max_height elif candidates_len !=", "'row': filter_row, 'col': filter_col, }) self._vim.command('resize ' + str(winheight)) if", "self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context: UserContext =", "instead of local in # neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3')", "def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names'] candidate", "In Vim8, FileType autocmd is not fired after set filetype", "0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight,", "1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor =", "# AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT license", "# Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and", "option. self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite '", "None: if key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) #", "self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating = False", "def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int)", "# Ignore empty sources. error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates()", "/^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) if", "'$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) ->", "self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts or self._matched_pattern", "+= ' vsplit' if split == 'vertical' else ' split'", "> 1 if not sources: # Ignore empty sources. 
error(self._vim,", "to bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating =", "self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid})", "== 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key:", "self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command != '':", "not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if", "wincol if split == 'floating': if self._context['auto_resize'] and row >", "update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key", "short_name if source_names == 'short' else name return source_name def", "-> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else:", "= self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height !=", "filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window", "not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr',", "the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v", "start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite:", "'$') == 1): return winheight = max(self._winheight, 1) winwidth =", "self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter window", "def _start_timer(self, key: str) -> None: if key in self._timers:", "\" %{denite#get_status('path')}%*\" + \"%{\" + linenr + \"}%*\") else: winnr", "'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col", "bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating = split", "self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return", "split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') -", "@property def is_async(self) -> bool: return self._is_async def __init__(self, vim:", "while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self)", "if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight']))", "self._timers: return if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer',", "global value instead of local in # neovim. 
self._vim.command('setlocal colorcolumn=')", "None: self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default',", "[self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts", "self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'],", "return True return not (self._context['empty'] or self._is_async or self._candidates) def", "context['resume'] if resume: # Skip the initialization update = ('immediately',", "buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed']", "for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have", "contained') % ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self)", "preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed", "candidates_len < winminheight): self._winheight = winminheight elif candidates_len > max_height:", "Re-open denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer()", "previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr):", "_init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True", "> winwidth] if direction == 'dynamictop': direction = 'aboveleft' if", "= ( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts = [", "if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor)", "_update_status(self) -> None: inpt = '' if self._context['input']: inpt =", "== ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self)", "1 if not sources: # Ignore empty sources. 
error(self._vim, 'Empty", "if (winminheight != -1 and candidates_len < winminheight): self._winheight =", "None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight", "None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor =", "self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = ''", "candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight =", "source_name = short_name if source_names == 'short' else name return", "!= prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated =", "typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent!", "link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight", "typing.List[str] = [] self._statusline_sources = '' self._titlestring = '' self._ruler", "'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in window_options:", "'' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = []", "self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = {", "'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating", "is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate", "winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) #", "self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if not self._candidates", "v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split'] ==", "Nvim, UserContext, Candidates, Candidate from denite.parent import SyncParent class Default(object):", "filter_row = winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] =", "self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row = 0 if", "= {} self._vim.command('highlight! 
link CursorLine CursorLine') if self._floating or self._filter_floating:", "not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual", "link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = (", "keepalt %s %s %s %s' % ( self._get_direction(), vertical, command,", "'%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width':", "-1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ',", "save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer:", "self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources = ' '.join(statuses)", "self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else:", "self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" +", "'tab' or self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight,", "!= 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr',", "self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile'", "Candidates, Candidate from denite.parent import SyncParent class Default(object): @property def", "len(sources) > 1 if not sources: # Ignore empty sources.", "== 1 and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty']", "= sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1", "goto: # Move to the previous window self._vim.command('wincmd p') def", "self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth =", "for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if", "prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and", "not self._is_multi or source_names == 'hide': source_name = '' else:", "height = max(self._winheight, 1) width = max(self._winwidth, 1) else: width", "'[ERROR] ' + inpt path = '[' + self._context['path'] +", "'.') # Note: Close filter window before preview window self._vim.call('denite#filter#_close_filter_window')", "self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" +", "winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid',", "short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2])", "self._bufnr) elif key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr)", "def _update_status(self) -> None: inpt = '' if self._context['input']: inpt", "anchor = 'NW' row = 1 self._context['filter_winrow'] = row +", "-> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight", "= [] self._statusline_sources = '' self._titlestring = '' 
self._ruler =", "_start_timer(self, key: str) -> None: if key in self._timers: return", "AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT license #", "+ self._context['path'] + ']' status = { 'input': inpt, 'sources':", "in range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer =", "cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) ->", "Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite", "self._vim.current.window.options[k] # Note: Have to use setlocal instead of \"current.window.options\"", "self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number", "= [] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result def do_action(self,", "regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1',", "self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr))", "= row + opened_pos else: anchor = 'NW' row =", "def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status()", "and candidates_len < winminheight): self._winheight = winminheight elif candidates_len >", "else: if self._context != context: self._context.clear() self._context.update(context) self._context['sources'] = sources", "winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if", "self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute", "if self._context['input']: inpt = self._context['input'] + ' ' if self._context['error_messages']:", "action_name, candidates) self._result = candidates if command != '': self._vim.command(command)", "{} window_options = { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn',", "self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) ->", "if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False", "key in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return", "self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) +", "_check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True", "_get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) ->", "options['swapfile'] = False options['buflisted'] = False options['modeline'] = False options['modifiable']", "else: self._denite = SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources]", "is_fit else 'topleft' else: direction = 'belowright' if is_fit else", "or self._is_async or self._candidates) def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric():", "self.redraw() self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear() 
self._context.update(context) self._context['sources']", "False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos:", "Jump to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if", "= self._candidates[index] terms = [] if self._is_multi and source_names !=", "terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding,", "0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self)", "+ '}') self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0,", "denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self)", "# Move the window to bottom self._vim.command('wincmd J') self._winrestcmd =", "1 def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self)", "goto: # Jump to denite window self._init_buffer() self.do_action('default') candidate =", "'win_gotoid', self._winid) if goto: # Jump to denite window self._init_buffer()", "Note: execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd)", "Candidates = [] self._cursor = 0 self._entire_len = 0 self._result:", "key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self,", "{} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {}", "name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if", "k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to", "and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if", "self._winwidth = 0 self._winminheight = -1 self._is_multi = False self._is_async", "self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link", "r'\\\\\\1', self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar',", "in self._timers: return if key == 'update_candidates': self._timers[key] = self._vim.call(", "vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = [] self._candidates:", "[] self._context: UserContext = {} self._bufnr = -1 self._winid =", "bool) -> None: split = self._context['split'] if (split == 'no'", "self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called,", "self._selected_candidates: return [self._get_cursor_candidate() ] if self._get_cursor_candidate() else [] return [self._candidates[x]", "str: direction = str(self._context['direction']) if direction == 'dynamictop' or direction", "self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']:", "self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources =", "elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and", "self._winheight = max_height 
elif candidates_len != self._winheight: self._winheight = candidates_len", "if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%*", "'%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width':", "= self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] =", "filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize", "(self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated and", "= '[ERROR] ' + inpt path = '[' + self._context['path']", "return True elif (self._candidates and self._context['immediately'] or len(self._candidates) == 1", "if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers']", "sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map", "self._initialized = True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def", "self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical =", "is_vertical = split == 'vertical' if not is_current_buffer: restore =", "row = 0 self._context['filter_winrow'] = row + opened_pos else: anchor", "is_force: bool = True) -> None: self._context['is_redraw'] = is_force if", "self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile']", "= 0 self._context['filter_winrow'] = row + opened_pos else: anchor =", "self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd')", "if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else:", "int) -> Candidate: if not self._candidates or pos > len(self._candidates):", "or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated:", "def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) ->", "foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal", "'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v", "do_action(self, action_name: str, command: str = '', is_manual: bool =", "conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal", "+ opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow']", "command = ( 'buffer' if split in ['no', 'tab', 'floating',", "\"%{\" + linenr + \"}%*\") else: winnr = self._vim.call('win_id2win', self._winid)", "f'silent keepalt {command}', bufname) def _get_direction(self) -> str: direction =", "self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr ==", "in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid)", 
"candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index", "inpt = '[ERROR] ' + inpt path = '[' +", "self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite:", "self._cursor = self._vim.call('line', '.') # Note: Close filter window before", "(is_current_buffer and self._previous_text != self._context['input'])) if self._updated and is_changed: if", "context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi", "+= self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor',", "updated = (self._displayed_texts != prev_displayed_texts or self._matched_pattern != prev_matched_pattern or", "context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path'] def _start(self, sources: typing.List[typing.Any], context:", "-1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate()", "self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'],", "layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return", "direction = 'aboveleft' if is_fit else 'topleft' else: direction =", "if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1", "1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate()", "anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'),", "# Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not", "self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit = not [x for", "} for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note:", "not action: return post_action = self._context['post_action'] is_quit = action['is_quit'] or", "height + 3 > self._vim.options['lines']: anchor = 'SW' row =", "self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height", "self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options", "Vim8, FileType autocmd is not fired after set filetype option.", "return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow'])", "if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the", "= vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = []", "0) is_fit = not [x for x in self._displayed_texts if", "self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if", "WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd 
BufWinEnter') if not self._vim.call('has', 'nvim'):", "Note: Have to use setlocal instead of \"current.window.options\" # \"current.window.options\"", "[] self._statusline_sources = '' self._titlestring = '' self._ruler = False", "return def redraw(self, is_force: bool = True) -> None: self._context['is_redraw']", "> max_height: self._winheight = max_height elif candidates_len != self._winheight: self._winheight", "False self._filter_floating = False self._updated = False self._timers: typing.Dict[str, int]", "= action_name action = self._denite.get_action( self._context, action_name, candidates) if not", "if not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite =", "self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype']", "or len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True return", "None: if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def", "= self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates", "'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr',", "prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if", "== 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] +", "len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height", "linenr + \"}%*\")) def _get_display_source_name(self, name: str) -> str: source_names", "self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid',", "== self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status linenr", "return {} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates:", "if opened_pos + height + 3 > self._vim.options['lines']: anchor =", "'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight',", "else: direction = 'belowright' if is_fit else 'botright' return direction", "[] self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any] =", "== 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor',", "!= 'no' and self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if", "== 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating':", "str) -> str: source_names = self._context['source_names'] if not self._is_multi or", "= self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False", "= [x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in", "and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice to", "candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates", "'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.',", "self._context['input']) return self._updated def 
_update_displayed_texts(self) -> None: candidates_len = len(self._candidates)", "typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any]", "self._vim.current.window.height != winheight: if self._floating: wincol = self._context['winrow'] row =", "True elif (self._candidates and self._context['immediately'] or len(self._candidates) == 1 and", "if not action: return post_action = self._context['post_action'] is_quit = action['is_quit']", "self._vim.command('highlight! link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] =", "_ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in", "= self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern", "for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = (", "init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win':", "self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern", "'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode(", "context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context)", "max_height: self._winheight = max_height elif candidates_len != self._winheight: self._winheight =", "_init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr", "Note: After timer_stop is called, self._timers may be removed if", "re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window. 
return resume", "command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}',", "-> None: if self._cursor < len(self._candidates): self._cursor += 1 def", "self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical = 'vertical' if", "doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer>", "{command}', bufname) def _get_direction(self) -> str: direction = str(self._context['direction']) if", "source_names == 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+',", "str) -> None: if key not in self._timers: return self._vim.call('timer_stop',", "buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if", "not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None:", "+ opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor',", "'delete' options['swapfile'] = False options['buflisted'] = False options['modeline'] = False", "\"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr + \"}%*\")) def _get_display_source_name(self,", "restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate:", "if not self._denite or not candidates or not action_name: return", "'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str: direction", "is_changed: if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor()", "_switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name ==", "-> None: if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path':", "self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if", "row = 1 self._context['filter_winrow'] = row + height + opened_pos", "filter_row = 0 if wincol == 1 else row +", "-> None: if self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif", "self._matched_char_id = -1 if self._matched_pattern != '': self._matched_range_id = self._vim.call(", "self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches', self._winid)] if", "'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name)", "self._context['filter_winrow'] = row + height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr',", "== 'open': # Re-open denite buffer prev_cursor = self._cursor cursor_candidate", "self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts()", "init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row", "do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def", "if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit'", "self._ruler = self._vim.options['ruler'] self._switch_buffer() 
self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid')", "'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if", "= split == 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid')", "self._winid) self._matched_char_id = -1 if self._matched_pattern != '': self._matched_range_id =", "+ winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win',", "-> None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor", "prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated = True", "# Denite buffer is already closed return winids = self._vim.call('win_findbuf',", "if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if self._cursor", "\"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr +", "is_fit else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return", "elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if", "restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def", "str) -> None: if key in self._timers: return if key", "============================================================================ # FILE: default.py # AUTHOR: <NAME> <<EMAIL> at g<EMAIL>>", "-> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''):", "def _update_candidates(self) -> bool: if not self._denite: return False [self._is_async,", "self._vim.command(command) if is_quit and post_action == 'open': # Re-open denite", "in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer()", "'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col':", "= '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True)", "return if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr)", "of local in # neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal", "-> None: split = self._context['split'] if (split != 'no' and", "import echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim, UserContext,", "= self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if", "== 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid)", "None: if self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self)", "1 def _move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor", "denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate:", "re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] ==", "UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! autocmd! 
denite')", "self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: # Use", "else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row", "= True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self)", "self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'],", "= len(self._candidates) def _start_timer(self, key: str) -> None: if key", "split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr", "'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background']", "self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical'", "str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i) for i in", "in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction ==", "self._titlestring = self._vim.options['titlestring'] command = 'edit' if split == 'tab':", "is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth))", "split != 'no': command = self._get_direction() command += ' vsplit'", ">= 1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor", "prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer') def _update_status(self)", "self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow'] = row", "self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight =", "if self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width != winwidth:", "= self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True)", "1) width = max(self._winwidth, 1) else: width = self._context['winwidth'] height", "bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! 
link CursorLine", "candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr ==", "= -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def", "[ self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self)", "from denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util import", "self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid']", "nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal", "> 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row, 'col':", "= {} self._bufnr = -1 self._winid = -1 self._winrestcmd =", "return self._bufvars['denite_statusline'] = status self._prev_status = status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\"", "anchor = 'SW' row = 0 self._context['filter_winrow'] = row +", "cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler']", "'start_filter', 'quick_move') for key in update: self._context[key] = context[key] self._check_move_option()", "> len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self)", "['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt", "self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif", "self._vim.options['titlestring'] command = 'edit' if split == 'tab': self._vim.command('tabnew') elif", "self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input']", "initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for", "self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal',", "self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None:", "= '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force:", "context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path']", "self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources = '", "'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status: return", "winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight !=", "False self._winheight = 0 self._winwidth = 0 self._winminheight = -1", "'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s", "buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word']", "self._bufvars['denite_statusline'] = status self._prev_status = status linenr = 
\"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if", "self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context != context:", "!= '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern),", "\" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" +", "= (self._displayed_texts != prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources", "self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates)", "self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline')", "'vertical' else ' split' bufname = '[denite]-' + self._context['buffer_name'] if", "_init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self,", "= '[' + self._context['path'] + ']' status = { 'input':", "denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim,", "self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = { 'colorcolumn',", "if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor',", "self._vim.call('line', '.') # Note: Close filter window before preview window", "self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring']", "window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in", "'#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not", "'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern", "command = self._get_direction() command += ' vsplit' if split ==", "_move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) -> None:", "{} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if", "window. 
return resume = self._initialized and context['resume'] if resume: #", "Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter',", "if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] =", "1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif", "[sources] self._start_sources_queue(context) return self._result def do_action(self, action_name: str, command: str", "or self._candidates) def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor =", "nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal", "def _resize_buffer(self, is_current_buffer: bool) -> None: split = self._context['split'] if", "not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim)", "not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0],", "self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link", "self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates):", "if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear()", "Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit =", "self._matched_range_id = -1 if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid)", "self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config',", "max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical = split ==", "elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos']", "[] return def _restart(self) -> None: self._context['input'] = '' self._quit_buffer()", "in # neovim. 
# ============================================================================
# FILE: default.py
# AUTHOR: <NAME> <<EMAIL> at g<EMAIL>>
# License: MIT license
# ============================================================================

import re
import typing

from denite.util import echo, error, clearmatch, regex_convert_py_vim
from denite.util import Nvim, UserContext, Candidates, Candidate
from denite.parent import SyncParent


class Default(object):

    @property
    def is_async(self) -> bool:
        return self._is_async

    def __init__(self, vim: Nvim) -> None:
        self._vim = vim
        self._denite: typing.Optional[SyncParent] = None
        self._selected_candidates: typing.List[int] = []
        self._candidates: Candidates = []
        self._cursor = 0
        self._entire_len = 0
        self._result: typing.List[typing.Any] = []
        self._context: UserContext = {}
        self._bufnr = -1
        self._winid = -1
        self._winrestcmd = ''
        self._initialized = False
        self._winheight = 0
        self._winwidth = 0
        self._winminheight = -1
        self._is_multi = False
        self._is_async = False
        self._matched_pattern = ''
        self._displayed_texts: typing.List[str] = []
        self._statusline_sources = ''
        self._titlestring = ''
        self._ruler = False
        self._prev_action = ''
        self._prev_status: typing.Dict[str, typing.Any] = {}
        self._prev_curpos: typing.List[typing.Any] = []
        self._save_window_options: typing.Dict[str, typing.Any] = {}
        self._sources_history: typing.List[typing.Any] = []
        self._previous_text = ''
        self._floating = False
        self._filter_floating = False
        self._updated = False
        self._timers: typing.Dict[str, int] = {}
        self._matched_range_id = -1
        self._matched_char_id = -1
        self._check_matchdelete = bool(self._vim.call(
            'denite#util#check_matchdelete'))

    # Only the signatures of the remaining methods are cleanly recoverable
    # here; their bodies (window splitting, floating-window setup,
    # buffer/candidate updates, status line, cursor movement and timers)
    # survive only as fragments:
    #   start(), do_action(), quit(), redraw(),
    #   _start_sources_queue(), _start(), _restart(),
    #   _init_buffer(), _init_syntax(), _init_cursor(), _init_denite(),
    #   _switch_buffer(), _switch_prev_buffer(), _split_floating(),
    #   _resize_buffer(), _close_current_window(), _quit_buffer(),
    #   _gather_candidates(), _update_candidates(), _update_displayed_texts(),
    #   _update_buffer(), _update_status(),
    #   _get_candidate_display_text(), _get_display_source_name(),
    #   _get_direction(), _get_wininfo(), _get_max_height(),
    #   _get_cursor_candidate(), _get_candidate(), _get_selected_candidates(),
    #   _move_to_pos(), _move_to_first_line(), _move_to_last_line(),
    #   _move_to_next_line(), _move_to_prev_line(),
    #   _do_immediately(), _do_command(), _check_do_option(),
    #   _check_move_option(), _start_timer(), _stop_timer()
[ "_stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily redirect", "= None dest_file = None try: if stdchannel is None:", "\"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager", "-*- coding: utf-8 -*- \"\"\"Context managers implemented for (mostly) internal", "None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename,", "(mostly) internal use\"\"\" import contextlib import functools from io import", "yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno())", "UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager", "None dest_file = None try: if stdchannel is None: yield", "None try: if stdchannel is None: yield iter([None]) else: oldstdchannel", "os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel,", "(http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None try: if", "<filename>PyDSTool/core/context_managers.py # -*- coding: utf-8 -*- \"\"\"Context managers implemented for", "or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel =", "import UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"]", "stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel", "None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr)", "functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp = functools.partial(_stdchannel_redirected, None,", "# -*- coding: utf-8 -*- \"\"\"Context managers implemented for (mostly)", "\"\"\" A context manager to temporarily redirect stdout or stderr", "finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file", "use\"\"\" import contextlib import functools from io import UnsupportedOperation import", "yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is", "to temporarily redirect stdout or stderr Originally by <NAME>, 2013", "mode=\"w\"): \"\"\" A context manager to temporarily redirect stdout or", "contextlib import functools from io import UnsupportedOperation import os import", "import contextlib import functools from io import UnsupportedOperation import os", "manager to temporarily redirect stdout or stderr Originally by <NAME>,", "Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file", "= functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp = functools.partial(_stdchannel_redirected,", "2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None try:", "else: oldstdchannel = 
os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno())", "= None try: if stdchannel is None: yield iter([None]) else:", "= os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except", "is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file =", "@contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to", "iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if", "io import UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\",", "dest_file = None try: if stdchannel is None: yield iter([None])", "is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None:", "redirect stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\"", "[\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context", "is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr =", "dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp", "context manager to temporarily redirect stdout or stderr Originally by", "def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily", "import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename,", "for (mostly) internal use\"\"\" import contextlib import functools from io", "not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected,", "open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None])", "stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None", "= [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A", "implemented for (mostly) internal use\"\"\" import contextlib import functools from", "AttributeError): yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel,", "(UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is not None:", "import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def", "-*- \"\"\"Context managers implemented for (mostly) internal use\"\"\" import contextlib", "oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not", "RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp =", "stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file", "coding: utf-8 -*- \"\"\"Context managers implemented for (mostly) internal use\"\"\"", "__all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def 
_stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\"", "try: if stdchannel is None: yield iter([None]) else: oldstdchannel =", "not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close()", "by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file =", "\"\"\"Context managers implemented for (mostly) internal use\"\"\" import contextlib import", "temporarily redirect stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/)", "dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily redirect stdout", "yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode)", "= open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield", "from io import UnsupportedOperation import os import sys __all__ =", "A context manager to temporarily redirect stdout or stderr Originally", "internal use\"\"\" import contextlib import functools from io import UnsupportedOperation", "oldstdchannel = None dest_file = None try: if stdchannel is", "if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is", "except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is not", "sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"):", "sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp = functools.partial(_stdchannel_redirected, None, \"\")", "stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel", "if stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno())", "os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation,", "stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected,", "utf-8 -*- \"\"\"Context managers implemented for (mostly) internal use\"\"\" import", "import functools from io import UnsupportedOperation import os import sys", "os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if", "functools from io import UnsupportedOperation import os import sys __all__", "dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError):", "managers implemented for (mostly) internal use\"\"\" import contextlib import functools", "mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally:", "if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout)", "<NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None", "os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout =", "None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() 
RedirectStdout", "dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr", "\"\"\" oldstdchannel = None dest_file = None try: if stdchannel", "oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield", "iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno()," ]
[ "\"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes in", ": \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js", ": \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures =", "\"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ]", "app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\"", "], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\":", "Scheduled Tasks # --------------- # scheduler_events = { # \"all\":", "---------- # automatically create page for each record of this", "in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js", "{ # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js", "\"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js =", "<head> # ------------------ # include js, css files in header", "} # Website user home page (by function) # get_website_user_home_page", "css files in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\"", "# ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], #", "[ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\"", "app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher =", "scripted ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", #", "# Generators # ---------- # automatically create page for each", "] } ] # Home Pages # ---------- # application", "frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- #", "Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install =", "\"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js = {\"page\"", "= \"<EMAIL>\" app_license = \"MIT\" # Includes in <head> #", "doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js", "App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email =", "# \"pos_kiosk.tasks.monthly\" # ] # } # Testing # -------", "function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- #", "] # Home Pages # ---------- # application home page", "# before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------", "# \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # } #", "= { # \"Role\": \"home_page\" # } # Website user", "] # } # Testing # ------- # before_tests =", "= [\"Web Page\"] # Installation # ------------ # before_install =", "# \"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\", #", "# role_home_page = { # \"Role\": \"home_page\" # } #", "= \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description", "= \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js =", "\"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\":", "js in doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"}", "See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # -----------", "page for each record of this doctype # 
website_generators =", "# } # # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\",", "include js, css files in header of web template #", "\"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ]", ". import __version__ as app_version app_name = \"pos_kiosk\" app_title =", "= \"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\"", "app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\"", "web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\"", "page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } #", "\"method\" # } # } # Scheduled Tasks # ---------------", "= \"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license", "{\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\",", "# # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" # noqa", "= {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom", "for each record of this doctype # website_generators = [\"Web", "# \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" #", "application home page (will override Website Settings) # home_page =", "------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions", ": \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\", \"filters\":", "= \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header of", "methods and events # doc_events = { # \"*\": {", "= \"MIT\" # Includes in <head> # ------------------ # include", "{\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} #", "= {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"}", "Pages # ---------- # application home page (will override Website", "= \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in", "home_page = \"login\" # website user home page (by Role)", "], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\":", "utf-8 -*- from __future__ import unicode_literals from . 
import __version__", "{\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} #", "# has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } #", "# ---------- # application home page (will override Website Settings)", "Payment-logo\" ] ] ] } ] # Home Pages #", "Methods # ------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\":", "\"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color", "Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] } ] #", "user home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" #", "# \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" #", "= { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include", "# before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk", "\"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" # }", "fixtures = [ { \"doctype\": \"Custom Field\", \"filters\": [ [", "[ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ]", "include js, css files in header of desk.html # app_include_css", "web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js", "------------------ # include js, css files in header of desk.html", "of this doctype # website_generators = [\"Web Page\"] # Installation", "doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\":", "page # page_js = {\"page\" : \"public/js/file.js\"} # page_js =", "[ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\"", "# \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [", "Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon =", "desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" #", "of Payment-logo\" ] ] ] } ] # Home Pages", "(will override Website Settings) # home_page = \"login\" # website", "unicode_literals from . import __version__ as app_version app_name = \"pos_kiosk\"", "\"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in doctype", "after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See", "# Document Events # --------------- # Hook on document methods", "\"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ #", "# \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = {", "__version__ as app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\"", "web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js", "} # Scheduled Tasks # --------------- # scheduler_events = {", "js, css files in header of desk.html # app_include_css =", "app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon", "__future__ import unicode_literals from . 
import __version__ as app_version app_name", "Tasks # --------------- # scheduler_events = { # \"all\": [", "# Website user home page (by function) # get_website_user_home_page =", "= \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ # #", "ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # }", "include js in page # page_js = {\"page\" : \"public/js/file.js\"}", "# \"on_trash\": \"method\" # } # } # Scheduled Tasks", "# include js in doctype views # doctype_js = {\"doctype\"", "\"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [ #", "\"Mode of Payment-logo\" ] ] ] } ] # Home", "----------- # Permissions evaluated in scripted ways # permission_query_conditions =", "= \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes", "\"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] }", "# ------------------ # include js, css files in header of", "Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\"", "\"<EMAIL>\" app_license = \"MIT\" # Includes in <head> # ------------------", "record of this doctype # website_generators = [\"Web Page\"] #", "\"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods", "\"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description =", "import unicode_literals from . import __version__ as app_version app_name =", "permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # #", "Hook on document methods and events # doc_events = {", "\"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ #", "# \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" #", "Permissions evaluated in scripted ways # permission_query_conditions = { #", "Website Settings) # home_page = \"login\" # website user home", "# Home Pages # ---------- # application home page (will", "before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ #", "automatically create page for each record of this doctype #", "create page for each record of this doctype # website_generators", "role_home_page = { # \"Role\": \"home_page\" # } # Website", "files in header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\"", "# \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in", "{ # \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\"", "[ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\"", "in page # page_js = {\"page\" : \"public/js/file.js\"} # page_js", "css files in header of web template # web_include_css =", "Events # --------------- # Hook on document methods and events", "doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" :", "has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document", "Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted", "# Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding", "= \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk Notifications #", 
"= { # \"*\": { # \"on_update\": \"method\", # \"on_cancel\":", "# Permissions evaluated in scripted ways # permission_query_conditions = {", "} ] # Home Pages # ---------- # application home", "files in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" #", "--------------- # Hook on document methods and events # doc_events", "# include js in page # page_js = {\"page\" :", "# ----------- # Permissions evaluated in scripted ways # permission_query_conditions", "# Permissions # ----------- # Permissions evaluated in scripted ways", "# doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\"", "{ # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events #", "(by Role) # role_home_page = { # \"Role\": \"home_page\" #", "# ------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\"", "# \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" #", "# website user home page (by Role) # role_home_page =", ": \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js", "app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes in <head>", "header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js =", "# app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include", "app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color =", "in doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} #", "page_js = {\"page\" : \"public/js/file.js\"} # page_js = { #", "= \"login\" # website user home page (by Role) #", "scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\" # ],", "js in page # page_js = {\"page\" : \"public/js/file.js\"} #", "(by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ----------", "\"method\", # \"on_trash\": \"method\" # } # } # Scheduled", "} # # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", #", "[ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\"", "\"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\", \"filters\": [", "---------- # application home page (will override Website Settings) #", "# } # Website user home page (by function) #", "\"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ],", "[ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\"", "Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\",", "page (by Role) # role_home_page = { # \"Role\": \"home_page\"", "Role) # role_home_page = { # \"Role\": \"home_page\" # }", "{ \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\", [", "include js in doctype views # doctype_js = {\"doctype\" :", "# automatically create page for each record of this doctype", "# \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [", "in scripted ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\",", "# doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\"", "\"on_cancel\": \"method\", # \"on_trash\": \"method\" # } # } #", "# } # Document Events # --------------- # Hook on", 
"app_color = \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" #", "Website user home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\"", "Permissions # ----------- # Permissions evaluated in scripted ways #", "\"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header of web", "before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk Notifications", "user home page (by Role) # role_home_page = { #", "# ---------- # automatically create page for each record of", "\"pos_kiosk.tasks.monthly\" # ] # } # Testing # ------- #", "of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js =", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from", "] ] } ] # Home Pages # ---------- #", "in header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" #", "# ------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" #", "\"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js =", "and events # doc_events = { # \"*\": { #", "\"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create page for", "home page (will override Website Settings) # home_page = \"login\"", "# Hook on document methods and events # doc_events =", "Document Events # --------------- # Hook on document methods and", "# scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\" #", "# # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # }", "# Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install", "# } # } # Scheduled Tasks # --------------- #", "octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\"", "= \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\"", "\"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------", "] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # }", "# Includes in <head> # ------------------ # include js, css", "[\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in doctype views", "# permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } #", "# website_generators = [\"Web Page\"] # Installation # ------------ #", "evaluated in scripted ways # permission_query_conditions = { # \"Event\":", "# \"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" #", "} # Document Events # --------------- # Hook on document", "\"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config #", "{\"page\" : \"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\",", "this doctype # website_generators = [\"Web Page\"] # Installation #", "# Scheduled Tasks # --------------- # scheduler_events = { #", "# ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods", "Includes in <head> # ------------------ # include js, css files", "-*- coding: utf-8 -*- from __future__ import unicode_literals from .", "app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk", "[\"Web Page\"] # Installation # 
------------ # before_install = \"pos_kiosk.install.before_install\"", "# include js, css files in header of desk.html #", "= { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission", "\"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = { # \"Event\":", "# Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config", "= [ { \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\",", "as app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher", "= { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events", "doc_events = { # \"*\": { # \"on_update\": \"method\", #", "Home Pages # ---------- # application home page (will override", "document methods and events # doc_events = { # \"*\":", "Page\"] # Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" #", "[ # \"pos_kiosk.tasks.monthly\" # ] # } # Testing #", "# --------------- # scheduler_events = { # \"all\": [ #", "views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js =", "\"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email", "# page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # }", "home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators", "in <head> # ------------------ # include js, css files in", "] ] ] } ] # Home Pages # ----------", "app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js,", "------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods #", "\"MIT\" # Includes in <head> # ------------------ # include js,", "# include js, css files in header of web template", "\"Custom Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice", "\"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales", "from . 
import __version__ as app_version app_name = \"pos_kiosk\" app_title", "# \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [", "\"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license =", "\"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # --------------- #", "home page (by Role) # role_home_page = { # \"Role\":", "\"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated in scripted", "--------------- # scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\"", "Settings) # home_page = \"login\" # website user home page", "# get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically", "# \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # ---------------", "= {\"page\" : \"public/js/file.js\"} # page_js = { # \"kiosk\":", "Generators # ---------- # automatically create page for each record", "\"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode", "# See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions #", "override Website Settings) # home_page = \"login\" # website user", "\"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page", "coding: utf-8 -*- from __future__ import unicode_literals from . import", "# application home page (will override Website Settings) # home_page", "# \"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [", "} # include js in doctype views # doctype_js =", "# doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ {", "events # doc_events = { # \"*\": { # \"on_update\":", "\"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ #", "\"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # --------------- # Hook", "} # Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" #", "\"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [", "{ # \"Role\": \"home_page\" # } # Website user home", "each record of this doctype # website_generators = [\"Web Page\"]", "of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\"", "js, css files in header of web template # web_include_css", "\"home_page\" # } # Website user home page (by function)", "= { # \"all\": [ # \"pos_kiosk.tasks.all\" # ], #", "= \"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\"", "template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" #", "\"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = { #", "# } # Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\"", "doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" :", "= \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css", "doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" :", "website user home page (by Role) # role_home_page = {", "= {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"}", "# ] 
# } # Testing # ------- # before_tests", "override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" # noqa # }", "{ # \"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\",", "Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = {", "# override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" # noqa #", "Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = { #", "{ # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission =", "------------ # before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" #", "{ # \"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\":", "= \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config", "# notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions", "# home_page = \"login\" # website user home page (by", "website_generators = [\"Web Page\"] # Installation # ------------ # before_install", "# doc_events = { # \"*\": { # \"on_update\": \"method\",", "import __version__ as app_version app_name = \"pos_kiosk\" app_title = \"Pos", "# web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include", "from __future__ import unicode_literals from . import __version__ as app_version", "app_license = \"MIT\" # Includes in <head> # ------------------ #", "\"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" # } #", "on document methods and events # doc_events = { #", "# doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\"", "= \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated in", "Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] } ]", "\"on_trash\": \"method\" # } # } # Scheduled Tasks #", "# --------------- # Hook on document methods and events #", "app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header", "[ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of", "-*- from __future__ import unicode_literals from . 
import __version__ as", "doctype # website_generators = [\"Web Page\"] # Installation # ------------", "# } # Scheduled Tasks # --------------- # scheduler_events =", "------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" #", "= \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create page", "# ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], #", "# \"on_cancel\": \"method\", # \"on_trash\": \"method\" # } # }", "# app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in", "# } # include js in doctype views # doctype_js", "\"Role\": \"home_page\" # } # Website user home page (by", "\"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon", "# ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\"", "\"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ]", "= {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"}", "# \"Role\": \"home_page\" # } # Website user home page", "\"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # } # Testing", "page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators #", "page (will override Website Settings) # home_page = \"login\" #", ": \"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"]", "\"login\" # website user home page (by Role) # role_home_page", "], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\":", "[ { \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\",", "\"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ],", "# page_js = {\"page\" : \"public/js/file.js\"} # page_js = {", "\"public/js/includes/number_to_words.js\"] # } # include js in doctype views #", "notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated", "# ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] #", "{\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures", "# ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] #", "header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js", "\"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] #", "# web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page #", "Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config =", "get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create", "\"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ]", "\"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files", "# after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ #", "} # } # Scheduled Tasks # --------------- # scheduler_events", "# Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods =" ]
[ "= Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question],", "KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network", "name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],)", "KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg)", "Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2,", "import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use", "= self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen,", "EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod", "64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response", "concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc)", "recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response)", "LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import", "Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1)", "= Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc =", "64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3,", "class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\"", "def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config", "eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question)", "Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2,", "Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import", "= concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response", "Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute,", "Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a", "config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg", "loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural", "Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from", "keras import Model, Input from keras.layers import Dense, concatenate, LSTM,", "), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question", "eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64,", "Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use", "from keras import Model, Input from keras.layers import Dense, concatenate,", "= Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question 
= Embedding(self._vocab_size, 64)(question)", "class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\"", "= 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ =", "a simple lstm neural network \"\"\" @staticmethod def default_config(): config", "), name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc)", "lstm neural network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config()", "), name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size,", "Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story,", "def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story')", "config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self):", "eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story", "Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question", "= Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question =", "eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size,", "Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story)", "config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self):", "eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question", "= Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\"", "= Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\"", "axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response)", "Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy'])", "model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ),", "concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response =", "eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story)", "= Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story", "Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from", "eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response =", "question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a", "name='story') question = Input((self._query_maxlen, ), name='question') eb_story 
= Embedding(self._vocab_size, 64)(story)", "__init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden =", "\"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32", "= Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) #", "<gh_stars>1-10 from keras import Model, Input from keras.layers import Dense,", "question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response", "= LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model =", "padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1)", "Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question =", "Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64,", "self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ),", "64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question =", "question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story", "name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc", "ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod", "eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story)", "3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question)", "self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm", "loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural", "Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story", "def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden", "story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question')", "dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question],", "conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response", "3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16,", "Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc = Reshape((1,", "_create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question", "= Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc", "= model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen,", "), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3,", "Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base", 
"Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use", "Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2,", "Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story =", "conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response =", "metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network", "Use a simple lstm neural network \"\"\" @staticmethod def default_config():", "Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story =", "3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16,", "self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm", "keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D,", "self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel):", "eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question)", "response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4),", "default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def", "Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32,", "Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc =", "self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel):", "padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question", "hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question =", "metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network", "32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg", "cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ),", "Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size,", "simple lstm neural network \"\"\" @staticmethod def default_config(): config =", "import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten", "import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural", "_create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question", "outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple", "eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = 
Flatten()(eb_question) conc", "import Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape,", "from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout,", "super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story", "64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3,", "eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response =", "keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\"", "self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ),", "padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3,", "padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question)", "cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden']", "question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a", "= Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy',", "= Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden,", "padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3,", "__init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden =", "class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\"", "Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel", "int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc)", "hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question =", "= 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg =", "= Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question =", "self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story =", "= Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3,", "= Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response =", "model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden']", "Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam", "eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story,", "config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_", "neural network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden']", "Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc =", "= Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden,", "= 
Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden,", "question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc", "activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class", "Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding,", "return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def", "def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden", "KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg)", "Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question)", "= Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question", "pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm", "Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel class", "3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question],", "1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response)", "= KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg):", "= concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2,", "Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size,", "= KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg):", "32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg", "\"\"\" Use a simple lstm neural network \"\"\" @staticmethod def", "= self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen,", "= Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story", "), name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story,", "super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story", "eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question)", "3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story =", "= Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc =", "@staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return", "# eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response", "config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self,", "conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc)", "SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod", "= Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = 
Dropout(0.3)(eb_question) conc", "network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] =", "name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story)", "eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size,", "self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story =", "= cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen,", "LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story,", "conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc =", "name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question =", "from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple", "= Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) #", "def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story')", "outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple", "Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32,", "= Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story =", "return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def", "# eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question =", "eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question", "response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model", "Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question", "concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers", "from keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel):", "padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story)", "3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question =", "= Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story =" ]
[ "activationName self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output", "from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class", "def downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name)", "PNASNet in PyTorch. Paper: Progressive Neural Architecture Search ''' from", "self.in_planes = planes def forward(self, x): output_list = [] for", "layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2,", "self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output", "in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(),", "pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model", "x): output_list = [] for block in self._modules.values(): x =", "block self.activation_name = activationName self.bn_name = bnName self.first_output = num_planes", "block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6,", "* 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells)", "padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output", "= activationName self.bn_name = bnName self.first_output = num_planes self.in_planes =", "_ in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name)", "temp_block, planes) self.in_planes = planes def downsample(self, planes): down_block =", "Author: ''' PNASNet in PyTorch. 
Paper: Progressive Neural Architecture Search", "= [] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3,", "from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block", "= data_channel self.num_cells = num_cells self.block = block self.activation_name =", "self.num_cells) def make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block", "2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output *", "def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return", "bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output *", "model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def", "def make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block =", "model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB) model.set_name(BackboneName.PNASNetB)", "Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType,", "num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel", "* 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output", "x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model =", "self._modules.values(): x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model", "self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list()", "self.block_out_channels = [] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output,", "__all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6,", "python # -*- coding:utf-8 -*- # Author: ''' PNASNet in", "super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block =", "bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells", "self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells)", "planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes", "planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block,", "num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel,", "4, 
self.num_cells) def make_layer(self, planes, num_cells): for _ in range(num_cells):", "planes def forward(self, x): output_list = [] for block in", "self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self,", "import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import", "self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes =", "= 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False,", "= self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes", "pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB) model.set_name(BackboneName.PNASNetB) return model", "ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from", "from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block", "Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import", "data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel =", "return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB)", "self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output *", "# -*- coding:utf-8 -*- # Author: ''' PNASNet in PyTorch.", "stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells)", "activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output", "[] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1,", "import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA,", "self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index = 0", "self.num_cells = num_cells self.block = block self.activation_name = activationName self.bn_name", "self.block = block self.activation_name = activationName self.bn_name = bnName self.first_output", "planes) self.in_planes = planes def downsample(self, planes): down_block = self.block(self.in_planes,", "num_cells): for _ in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1,", "num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def 
pnasnet_B(data_channel): model =", "num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells", "= block self.activation_name = activationName self.bn_name = bnName self.first_output =", "easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import", "from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from", "create_block_list(self): self.block_out_channels = [] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel,", "BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from", "bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2)", "in PyTorch. Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name", "easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone):", "__init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel", "'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d,", "return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA)", "self.in_planes = planes def downsample(self, planes): down_block = self.block(self.in_planes, planes,", "temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes)", "easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ =", "planes, num_cells): for _ in range(num_cells): temp_block = self.block(self.in_planes, planes,", "bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self):", "self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block = block", "easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import", "downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(),", "down_block, planes) self.in_planes = planes def forward(self, x): output_list =", "from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__", "stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes def", "self.make_layer(self.first_output * 4, self.num_cells) def 
make_layer(self, planes, num_cells): for _", "PyTorch. Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name import", "for _ in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name,", "self.data_channel = data_channel self.num_cells = num_cells self.block = block self.activation_name", "self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes =", "[] for block in self._modules.values(): x = block(x) output_list.append(x) return", "= block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel,", "block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6,", "-*- # Author: ''' PNASNet in PyTorch. Paper: Progressive Neural", "def create_block_list(self): self.block_out_channels = [] self.index = 0 layer1 =", "#!/usr/bin/env python # -*- coding:utf-8 -*- # Author: ''' PNASNet", "num_cells self.block = block self.activation_name = activationName self.bn_name = bnName", "output_list = [] for block in self._modules.values(): x = block(x)", "self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1,", "PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model", "def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB) model.set_name(BackboneName.PNASNetB) return", "in self._modules.values(): x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel):", "= PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel):", "model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32,", "def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA)", "''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName", "* 4, self.num_cells) def make_layer(self, planes, num_cells): for _ in", "stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def", "activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes def downsample(self, planes):", "= bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def", "bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self,", "PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__()", "forward(self, x): output_list = [] for block in self._modules.values(): x", "coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. 
Paper: Progressive", "activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x):", "class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU):", "easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone", "self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index", "Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name", "CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3,", "self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes,", "make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block = self.block(self.in_planes,", "def forward(self, x): output_list = [] for block in self._modules.values():", "self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels", "self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4,", "* from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB", "''' PNASNet in PyTorch. Paper: Progressive Neural Architecture Search '''", "Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from", "for block in self._modules.values(): x = block(x) output_list.append(x) return output_list", "block in self._modules.values(): x = block(x) output_list.append(x) return output_list def", "ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B']", "self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4)", "activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block", "= num_cells self.block = block self.activation_name = activationName self.bn_name =", "['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA,", "= planes def downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2,", "block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells =", "CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self,", "= ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44,", "import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A',", "self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output *", "data_channel self.num_cells = num_cells self.block = block 
self.activation_name = activationName", "self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes def downsample(self, planes): down_block", "output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA)", "planes) self.in_planes = planes def forward(self, x): output_list = []", "= [] for block in self._modules.values(): x = block(x) output_list.append(x)", "layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name)", "= num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels =", "bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes def downsample(self,", "self.activation_name = activationName self.bn_name = bnName self.first_output = num_planes self.in_planes", "NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import *", "down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes)", "output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44,", "# Author: ''' PNASNet in PyTorch. Paper: Progressive Neural Architecture", "4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes, num_cells): for", "= self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes", "num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = []", "planes def downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name,", "= planes def forward(self, x): output_list = [] for block", "import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def", "2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def", "0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name,", "ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1,", "kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output,", "range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block,", "self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x): output_list", "* 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes, num_cells):", "import BackboneName from 
easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock", "= ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(),", "-*- coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. Paper:", "out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output)", "self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index = 0 layer1", "planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes", "Progressive Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType", "= self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index =" ]
[ "def get_access_token(token): resp = None request_count = 0 url =", "CONUS, 30m resolution \" \"SRTM between 60N and 60S, 30m", "global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\":", "{ \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0 }", "is not None: self.logger.error(e) if __name__ == '__main__': if 1:", "token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def", "token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir", "\" \"information funded by the European Union - EU-DEM layers\",", "class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token,", "d.start() time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e)", "= self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num): thread", "{ \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0 }", "_download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain',", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 511,", "[ { \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0", "z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z):", "{ \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 }", "latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y)", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 16383,", "% (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z):", "file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self,", "60N and 60S, 30m Europe. 
Minimum global coverage of 1000m.\",", "-1 try: with open(file_path, 'wb') as f: for chunk in", "max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8,", "URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None,", "self.threads = [] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir,", "bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y,", "not None: self.logger.error(e) if __name__ == '__main__': if 1: logger", "= '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') if", "as e: resp = None time.sleep(3) requre_count += 1 if", "logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr =", "\"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0 } ],", "requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break except", "63, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 } ], [", "task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db,", "is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count =", "self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token", "bboxs = self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox)", "try: with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024):", "Produced using Copernicus data and \" \"information funded by the", "math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self):", "\"startY\": 0 } ], [ { \"endX\": 32767, \"endY\": 16383,", "{ \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 }", "\"endY\": 1023, \"startX\": 0, \"startY\": 0 } ], [ {", "} ], [ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0,", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 63,", "e: resp = None time.sleep(3) if resp is None: return", "True: if request_count > 4: break try: request_count += 1", "0, \"startY\": 0 } ], [ { \"endX\": 15, \"endY\":", "'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk)", "= os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO)", "generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical Graphics Inc.,", "logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat", "0 } ], [ { \"endX\": 511, \"endY\": 255, \"startX\":", "TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except", "31, \"endY\": 15, \"startX\": 0, \"startY\": 0 } ], [", "\" \"SRTM between 60N and 60S, 30m Europe. 
Minimum global", "0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\":", "self.logger.error(e) if __name__ == '__main__': if 1: logger = logging.getLogger('down')", "7, \"endY\": 3, \"startX\": 0, \"startY\": 0 } ], [", "'' access_token = get_access_token(self.token) if access_token is None: return -1", "{ \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 }", "coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\",", "= \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break try:", "requre_count = 0 _url = '' access_token = get_access_token(self.token) if", "import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain,", "except Exception as e: resp = None time.sleep(3) if resp", "self.threads = [] self.download_done_signal.emit() except Exception as e: if self.logger", "self.threads: thread.start() for thread in self.threads: thread.wait() for t in", "0 } ], [ { \"endX\": 8191, \"endY\": 4095, \"startX\":", "1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13,", "= logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr", "self.logger is not None: self.logger.error(e) if __name__ == '__main__': if", "= TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out')", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 7,", "[ { \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0", "access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v':", "for thread in self.threads: thread.wait() for t in self.threads: t.stop()", "import math import logging import requests import time from map_download.cmd.BaseDownloader", "max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x)", "\"information funded by the European Union - EU-DEM layers\", \"available\":", "db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat,", "0, \"startY\": 0 } ], [ { \"endX\": 4095, \"endY\":", "1023, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "{'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code !=", "os import math import logging import requests import time from", "= [] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox,", "x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return", "1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox,", "try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox", "as e: return -1 self._data2DB(x, y, z, file_path) return 1", "0 } ], [ { \"endX\": 255, \"endY\": 127, \"startX\":", "json import os import math import logging import requests import", "1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = {", "'access_token': access_token} while True: if requre_count > 4: break try:", "class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, 
root_dir, bbox, token,", "= { \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced", "\"endY\": 1, \"startX\": 0, \"startY\": 0 } ], [ {", "resp = None time.sleep(3) requre_count += 1 if resp is", "- EU-DEM layers\", \"available\": [ [ { \"endX\": 1, \"endY\":", "self.download_done_signal.emit() except Exception as e: if self.logger is not None:", "\"startX\": 0, \"startY\": 0 } ] ], \"bounds\": [-180, -90,", "return -1 if resp.status_code != 200: return -1 try: with", "self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads =", "i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger,", "root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox,", "requre_count += 1 if resp is None: return -1 if", "127, \"endY\": 63, \"startX\": 0, \"startY\": 0 } ], [", "if resp is None: return None resp_json = resp.json() return", "[ { \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0", "logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat =", "90, ], \"description\": \"STK World Terrain Premium Tileset, v1.3. 10m", "open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk:", "\"startY\": 0 } ], [ { \"endX\": 63, \"endY\": 31,", "Exception as e: logger.error(e) if 0: accessToken = get_access_token() pass", "None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if", "] ], \"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK", "\"startY\": 0 } ], [ { \"endX\": 3, \"endY\": 1,", "of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\":", "self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for", "\"© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data", "\"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0 } ],", "import json import os import math import logging import requests", "_url = '' access_token = get_access_token(self.token) if access_token is None:", "'w') as f: json.dump(metadatas, f) except Exception as e: if", "TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token, thread_num,", "-*- # coding=utf-8 import json import os import math import", "in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return", "by the European Union - EU-DEM layers\", \"available\": [ [", "} ], [ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0,", "} ], [ { \"endX\": 31, \"endY\": 15, \"startX\": 0,", "10m - 30m resolution CONUS, 30m resolution \" \"SRTM between", "BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count =", "as e: if self.logger is not None: self.logger.exception(e) def run(self):", "__name__ == '__main__': if 1: logger = logging.getLogger('down') try: root", "return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL", "[ { \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0", "], [ { \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\":", "\"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\":", "{ \"endX\": 
511, \"endY\": 255, \"startX\": 0, \"startY\": 0 }", "= requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break", "bbox in bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count)", "0, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\":", "= root_dir self.token = token def bbox2xyz(self, bbox, z): min_x,", "Union - EU-DEM layers\", \"available\": [ [ { \"endX\": 1,", "[\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir,", "Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and", "], [ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\":", "0 _url = '' access_token = get_access_token(self.token) if access_token is", "= None time.sleep(3) requre_count += 1 if resp is None:", "while True: if request_count > 4: break try: request_count +=", "4: break try: _url = self.get_url(x, y, z) resp =", "thread out') except Exception as e: logger.error(e) if 0: accessToken", "Exception as e: resp = None time.sleep(3) requre_count += 1", "start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000)", "-1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while", "resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return -1", "\"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir", "_count self.division_done_signal.emit(count) for bbox in bboxs: while True: if not", "} ], [ { \"endX\": 1023, \"endY\": 511, \"startX\": 0,", "\"endY\": 7, \"startX\": 0, \"startY\": 0 } ], [ {", "json.dump(metadatas, f) except Exception as e: if self.logger is not", "0 } ], [ { \"endX\": 31, \"endY\": 15, \"startX\":", "in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception", "continue break except Exception as e: resp = None time.sleep(3)", "try: metadatas = { \"attribution\": \"© Analytical Graphics Inc., ©", "[ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0", "+= 1 param = {'access_token': token} resp = requests.get(url, params=param,", "= r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file =", "thread.start() for thread in self.threads: thread.wait() for t in self.threads:", "def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def", "\"STK World Terrain Premium Tileset, v1.3. 
10m - 30m resolution", "0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng,", "root_dir self.token = token def bbox2xyz(self, bbox, z): min_x, min_y", "Exception as e: return -1 self._data2DB(x, y, z, file_path) return", "except Exception as e: logger.error(e) if 0: accessToken = get_access_token()", "task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start()", "token} resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200:", "180, 90, ], \"description\": \"STK World Terrain Premium Tileset, v1.3.", "200: continue break except Exception as e: resp = None", "None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token':", "__init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir,", "[ { \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0", "e: resp = None time.sleep(3) requre_count += 1 if resp", "0 } ], [ { \"endX\": 3, \"endY\": 1, \"startX\":", "os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp", "between 60N and 60S, 30m Europe. Minimum global coverage of", "\"endY\": 16383, \"startX\": 0, \"startY\": 0 } ] ], \"bounds\":", "requre_count > 4: break try: _url = self.get_url(x, y, z)", "\"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir,", "} ], [ { \"endX\": 255, \"endY\": 127, \"startX\": 0,", "super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token =", "= os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f)", "\"available\": [ [ { \"endX\": 1, \"endY\": 0, \"startX\": 0,", "bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception", "self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num): thread =", "\"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 } ],", "bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) +", "logger, write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self,", "as e: if self.logger is not None: self.logger.error(e) if __name__", "# coding=utf-8 import json import os import math import logging", "\"endY\": 0, \"startX\": 0, \"startY\": 0 } ], [ {", "= None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True:", "break try: _url = self.get_url(x, y, z) resp = requests.get(_url,", "# -*- coding: utf-8 -*- # coding=utf-8 import json import", "max_lng = 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom", "0, \"startY\": 0 } ], [ { \"endX\": 511, \"endY\":", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 2047,", "bbox in bboxs: while True: if not self.running: time.sleep(0.01) else:", "\"startY\": 0 } ] ], \"bounds\": [-180, -90, 180, 90,", "def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__(", "for thread in self.threads: thread.start() for thread in self.threads: thread.wait()", "except Exception as e: if self.logger is not None: self.logger.error(e)", "thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) 
thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread)", "logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db')", "bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x,", "1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 } ], [", "self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y,", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 3,", "logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr)", "for t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit()", "[ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0", "\"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\",", "resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox,", "0 } ], [ { \"endX\": 127, \"endY\": 63, \"startX\":", "\"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 } ],", "{ \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0 }", "= 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom =", "= \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False):", "], [ { \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\":", "if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True)", "= {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if", "requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e: resp", "file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0", "Exception as e: resp = None time.sleep(3) if resp is", "0, \"startY\": 0 } ], [ { \"endX\": 255, \"endY\":", "> 4: break try: request_count += 1 param = {'access_token':", "z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception", "= self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox) count", "os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0 _url =", "f.write(chunk) except Exception as e: return -1 self._data2DB(x, y, z,", "[ { \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0", "Terrain Premium Tileset, v1.3. 
10m - 30m resolution CONUS, 30m", "write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread", "bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db)", "0 } ], [ { \"endX\": 32767, \"endY\": 16383, \"startX\":", "while True: if requre_count > 4: break try: _url =", "_url = self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True,", "self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads:", "200: return -1 try: with open(file_path, 'wb') as f: for", "{ \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0 }", "= self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2)", "f: json.dump(metadatas, f) except Exception as e: if self.logger is", "super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token =", "[ { \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0", "in bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for", "\"startY\": 0 } ], [ { \"endX\": 255, \"endY\": 127,", "\"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\",", "logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat = -90.0", "= 90.0 start_zoom = 0 end_zoom = 5 bbox =", "0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0 _url", "{ \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 }", "'%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path):", "0, \"startY\": 0 } ], [ { \"endX\": 8191, \"endY\":", "thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir =", "from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp", "self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x,", "break task_q = self.get_task_queue(bbox) self.threads = [] for i in", "None: return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread):", "logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr =", "coding=utf-8 import json import os import math import logging import", "\"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 } ],", "= logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr", "{ \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced using", "Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ],", "\"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\":", "and \" \"information funded by the European Union - EU-DEM", "\"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ],", "for bbox in bboxs: _count = 
self.get_task_count(bbox) count += _count", "return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def", "\"startY\": 0 } ], [ { \"endX\": 127, \"endY\": 63,", "{ \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 }", "break except Exception as e: resp = None time.sleep(3) requre_count", "None requre_count = 0 _url = '' access_token = get_access_token(self.token)", "root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger,", "get_access_token(self.token) if access_token is None: return -1 param = {'extensions':", "© CGIAR-CSI, Produced using Copernicus data and \" \"information funded", "self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as", "def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z)", "min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat,", "self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox, z):", "\"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0 } ],", "latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return", "\"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0 } ],", "self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir =", "import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None", "{'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count", "end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom,", "exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f:", "1 if resp is None: return -1 if resp.status_code !=", "+= _count self.division_done_signal.emit(count) for bbox in bboxs: while True: if", "= -90.0 max_lat = 90.0 start_zoom = 0 end_zoom =", "\"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain')", "self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z)", "], [ { \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\":", "import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def", "os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except", "\"SRTM between 60N and 60S, 30m Europe. Minimum global coverage", "4: break try: request_count += 1 param = {'access_token': token}", "resolution CONUS, 30m resolution \" \"SRTM between 60N and 60S,", "[] self.download_done_signal.emit() except Exception as e: if self.logger is not", "30m Europe. 
Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\",", "0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4:", "end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main", "\"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0 } ],", "z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' %", "return -1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine):", "+= 1 if resp is None: return -1 if resp.status_code", "thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token def", "\"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\",", "None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs", "bboxs: while True: if not self.running: time.sleep(0.01) else: break task_q", "\"endY\": 8191, \"startX\": 0, \"startY\": 0 } ], [ {", "'' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine,", "max_lat = 90.0 start_zoom = 0 end_zoom = 5 bbox", "self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self,", "\"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\",", "self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs =", "0, \"startY\": 0 } ], [ { \"endX\": 2047, \"endY\":", "write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token", "token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger,", "get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self,", "log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr)", "def generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical Graphics", "= token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat,", "not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0", "in self.threads: thread.start() for thread in self.threads: thread.wait() for t", "in self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads", "return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token}", "if self.logger is not None: self.logger.error(e) if __name__ == '__main__':", "map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp =", "2047, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "access_token} while True: if requre_count > 4: break try: _url", "= get_access_token(self.token) if access_token is None: return -1 param =", "if __name__ == '__main__': if 1: logger = logging.getLogger('down') try:", "self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path =", "\"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break try: request_count", "z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def", "d = 
TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread", "request_count > 4: break try: request_count += 1 param =", "\"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 } ],", "Premium Tileset, v1.3. 10m - 30m resolution CONUS, 30m resolution", "return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0", "0 } ], [ { \"endX\": 2047, \"endY\": 1023, \"startX\":", "Europe. Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\",", "= latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z)", "\"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir =", "True: if requre_count > 4: break try: _url = self.get_url(x,", "if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads", "resolution \" \"SRTM between 60N and 60S, 30m Europe. Minimum", "= {'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code", "1, \"endY\": 0, \"startX\": 0, \"startY\": 0 } ], [", "\"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK World Terrain", "thread in self.threads: thread.wait() for t in self.threads: t.stop() t.quit()", "os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng", "is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0',", "y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = ''", "1 def generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical", "1, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "= None time.sleep(3) if resp is None: return None resp_json", "url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break", "bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x),", "} ], [ { \"endX\": 511, \"endY\": 255, \"startX\": 0,", "Exception as e: if self.logger is not None: self.logger.error(e) if", "-180.0 max_lng = 180.0 min_lat = -90.0 max_lat = 90.0", "} _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir,", "True: if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox)", "!= 200: return -1 try: with open(file_path, 'wb') as f:", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 1023,", "None time.sleep(3) if resp is None: return None resp_json =", "} ], [ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0,", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 8191,", "if resp.status_code != 200: return -1 try: with open(file_path, 'wb')", "open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as e:", "= 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat,", "> 4: break try: _url = self.get_url(x, y, z) resp", "params=param, stream=True, timeout=2) break except Exception as e: resp =", "count = 0 bboxs = self.cut_bbox() for bbox in bboxs:", "def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir,", "bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain',", "Exception as e: if self.logger is not None: self.logger.exception(e) def", "3, \"endY\": 1, 
\"startX\": 0, \"startY\": 0 } ], [", "e: return -1 self._data2DB(x, y, z, file_path) return 1 class", "stream=True, timeout=2) break except Exception as e: resp = None", "= None requre_count = 0 _url = '' access_token =", "= '' access_token = get_access_token(self.token) if access_token is None: return", "0 } ], [ { \"endX\": 63, \"endY\": 31, \"startX\":", "utf-8 -*- # coding=utf-8 import json import os import math", "\"startY\": 0 } ], [ { \"endX\": 15, \"endY\": 7,", "e: if self.logger is not None: self.logger.exception(e) def run(self): try:", "latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count = 0", "'v': '1.1.0', 'access_token': access_token} while True: if requre_count > 4:", "x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z,", "time.sleep(3) requre_count += 1 if resp is None: return -1", "(self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y,", "], [ { \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\":", "} ], [ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0,", "[-180, -90, 180, 90, ], \"description\": \"STK World Terrain Premium", "-1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir", "return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir,", "0 } ] ], \"bounds\": [-180, -90, 180, 90, ],", "European Union - EU-DEM layers\", \"available\": [ [ { \"endX\":", "\"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path", "TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q,", "z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y),", "\"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\"", "\"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0 } ],", "= latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1,", "= -180.0 max_lng = 180.0 min_lat = -90.0 max_lat =", "resp = None requre_count = 0 _url = '' access_token", "return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir,", "import os import math import logging import requests import time", "resp is None: return -1 if resp.status_code != 200: return", "if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter", "z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1", "\"startY\": 0 } ], [ { \"endX\": 31, \"endY\": 15,", "1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter =", "[ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0", "try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler()", "request_count += 1 param = {'access_token': token} resp = requests.get(url,", "\"endY\": 3, \"startX\": 0, \"startY\": 0 } ], [ {", "r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = 
logging.StreamHandler() log_file = os.path.join(root,", "if access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask',", "max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) +", "BoundBox def get_access_token(token): resp = None request_count = 0 url", "63, \"endY\": 31, \"startX\": 0, \"startY\": 0 } ], [", "\"startY\": 0 } ], [ { \"endX\": 511, \"endY\": 255,", "if requre_count > 4: break try: _url = self.get_url(x, y,", "= [] self.download_done_signal.emit() except Exception as e: if self.logger is", "if request_count > 4: break try: request_count += 1 param", "math import logging import requests import time from map_download.cmd.BaseDownloader import", "0, \"startY\": 0 } ], [ { \"endX\": 63, \"endY\":", "31, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox", "metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas,", "= resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def", "-1 if resp.status_code != 200: return -1 try: with open(file_path,", "y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 32767,", "min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger)", "} ], [ { \"endX\": 7, \"endY\": 3, \"startX\": 0,", "resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue", "y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None", "0 } ], [ { \"endX\": 16383, \"endY\": 8191, \"startX\":", "13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\":", "'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count >", "5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d", "resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception as", "[ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0", "layers\", \"available\": [ [ { \"endX\": 1, \"endY\": 0, \"startX\":", "root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file", "logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread,", "= self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs:", "180.0 min_lat = -90.0 max_lat = 90.0 start_zoom = 0", "30m resolution \" \"SRTM between 60N and 60S, 30m Europe.", "z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count", "} ] ], \"bounds\": [-180, -90, 180, 90, ], \"description\":", "in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db)", "with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if", "math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas", "format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def 
get_url(self, x,", "0 } ], [ { \"endX\": 1023, \"endY\": 511, \"startX\":", "logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as e:", "0, \"startY\": 0 } ] ], \"bounds\": [-180, -90, 180,", "], [ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\":", "resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\"", "0, \"startY\": 0 } ], [ { \"endX\": 16383, \"endY\":", "os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as", "file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain')", "0, \"startY\": 0 } ], [ { \"endX\": 31, \"endY\":", "resp is None: return None resp_json = resp.json() return resp_json.get('accessToken')", "-*- coding: utf-8 -*- # coding=utf-8 import json import os", "(self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return", "= '' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False):", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 4095,", "return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path", "8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 } ], [", "% (self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x,", "count += _count self.division_done_signal.emit(count) for bbox in bboxs: while True:", "60S, 30m Europe. Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\",", "= BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root,", "= 0 _url = '' access_token = get_access_token(self.token) if access_token", "as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except", "y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y,", "+ 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas =", "{ \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 }", "as e: resp = None time.sleep(3) if resp is None:", "else: break task_q = self.get_task_queue(bbox) self.threads = [] for i", "], [ { \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\":", "_count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in", "Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and \"", "task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f'", "32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 } ] ],", "resp = None time.sleep(3) if resp is None: return None", "get_access_token(token): resp = None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\"", "None: self.logger.error(e) if __name__ == '__main__': if 1: logger =", "\"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 } ],", "in bboxs: while True: if not self.running: time.sleep(0.01) else: break", "z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path)", "15, \"endY\": 7, \"startX\": 0, \"startY\": 0 } ], [", "t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e:", "for bbox in bboxs: while True: if not self.running: time.sleep(0.01)", "127, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "[] for i in range(self.thread_num): thread = 
TerrainDownloaderThread(self.root_dir, self.bbox, self.token,", "EU-DEM layers\", \"available\": [ [ { \"endX\": 1, \"endY\": 0,", "TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread", "-90, 180, 90, ], \"description\": \"STK World Terrain Premium Tileset,", "1 param = {'access_token': token} resp = requests.get(url, params=param, timeout=2)", "time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = [] for", "exist_ok=True) resp = None requre_count = 0 _url = ''", "= logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng", "if self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata()", "except Exception as e: return -1 self._data2DB(x, y, z, file_path)", "while True: if not self.running: time.sleep(0.01) else: break task_q =", "using Copernicus data and \" \"information funded by the European", "y=y, z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s'", "], [ { \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\":", "Tileset, v1.3. 10m - 30m resolution CONUS, 30m resolution \"", "} ], [ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0,", "if resp is None: return -1 if resp.status_code != 200:", "logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' %", "], \"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK World", "'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception", "run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for", "data and \" \"information funded by the European Union -", "[ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0", "\"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0 } ],", "\"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\":", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 31,", "{ \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0 }", "root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata(", "self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs: while", "'__main__': if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader'", "math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try:", "time.sleep(3) if resp is None: return None resp_json = resp.json()", "v1.3. 
10m - 30m resolution CONUS, 30m resolution \" \"SRTM", "thread in self.threads: thread.start() for thread in self.threads: thread.wait() for", "None: return -1 if resp.status_code != 200: return -1 try:", "resp.status_code != 200: return -1 try: with open(file_path, 'wb') as", "\"endY\": 127, \"startX\": 0, \"startY\": 0 } ], [ {", "funded by the European Union - EU-DEM layers\", \"available\": [", "'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path),", "\"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0 } ],", "\"endY\": 31, \"startX\": 0, \"startY\": 0 } ], [ {", "chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x, y,", "[\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0,", "z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y =", "range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar)", "and 60S, 30m Europe. Minimum global coverage of 1000m.\", \"extensions\":", "} ], [ { \"endX\": 127, \"endY\": 63, \"startX\": 0,", "0 } ], [ { \"endX\": 15, \"endY\": 7, \"startX\":", "try: _url = self.get_url(x, y, z) resp = requests.get(_url, params=param,", "\"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" }", "requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox", "__init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num,", "math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = { \"attribution\":", "\"startY\": 0 } ], [ { \"endX\": 7, \"endY\": 3,", "y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x,", "CGIAR-CSI, Produced using Copernicus data and \" \"information funded by", "for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q,", "\"startY\": 0 } ], [ { \"endX\": 4095, \"endY\": 2047,", "0, \"startY\": 0 } ], [ { \"endX\": 32767, \"endY\":", "self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break", "self.division_done_signal.emit(count) for bbox in bboxs: while True: if not self.running:", "write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng,", "thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread in", "- 30m resolution CONUS, 30m resolution \" \"SRTM between 60N", "4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 } ], [", "= 0 bboxs = self.cut_bbox() for bbox in bboxs: _count", "\"endY\": 511, \"startX\": 0, \"startY\": 0 } ], [ {", "-90.0 max_lat = 90.0 start_zoom = 0 end_zoom = 5", "'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z,", "BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox,", "\"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus", "time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox 
def get_access_token(token):", "0 bboxs = self.cut_bbox() for bbox in bboxs: _count =", "None time.sleep(3) requre_count += 1 if resp is None: return", "[ [ { \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\":", "16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 } ], [", "8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as", "\"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\":", "Inc., © CGIAR-CSI, Produced using Copernicus data and \" \"information", "{ \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0 }", "logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s')", "\"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread,", "= logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter)", "\"startY\": 0 } ], [ { \"endX\": 16383, \"endY\": 8191,", "bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y", "thread.wait() for t in self.threads: t.stop() t.quit() self.threads = []", "time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e) if", "logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir", "= TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for", "DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count", "], [ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\":", "], [ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\":", "\"description\": \"STK World Terrain Premium Tileset, v1.3. 
10m - 30m", "0 } ], [ { \"endX\": 7, \"endY\": 3, \"startX\":", "\"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 } ]", "0, \"startY\": 0 } ], [ { \"endX\": 127, \"endY\":", "255, \"endY\": 127, \"startX\": 0, \"startY\": 0 } ], [", "\"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\",", "import logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine,", "[ { \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0", "x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x,", "t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e: if", "\"endY\": 255, \"startX\": 0, \"startY\": 0 } ], [ {", "is None: return -1 if resp.status_code != 200: return -1", "try: request_count += 1 param = {'access_token': token} resp =", "{ \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0 }", "request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count", "self.threads.append(thread) for thread in self.threads: thread.start() for thread in self.threads:", "0, \"startY\": 0 } ], [ { \"endX\": 3, \"endY\":", "\"endY\": 4095, \"startX\": 0, \"startY\": 0 } ], [ {", "y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except", "resp = None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while", "task_q = self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num):", "hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file)", "bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q,", "except Exception as e: resp = None time.sleep(3) requre_count +=", "_dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json')", "as f: json.dump(metadatas, f) except Exception as e: if self.logger", "self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token", "World Terrain Premium Tileset, v1.3. 
10m - 30m resolution CONUS,", "timeout=2) break except Exception as e: resp = None time.sleep(3)", "t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except", "file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0", "param = {'access_token': token} resp = requests.get(url, params=param, timeout=2) if", "param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True:", "[ { \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0", "255, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as", "\"startY\": 0 } ], [ { \"endX\": 2047, \"endY\": 1023,", "def run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox()", "{ \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0 }", "'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng =", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 255,", "0, \"startY\": 0 } ], [ { \"endX\": 7, \"endY\":", "7, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "min_lat = -90.0 max_lat = 90.0 start_zoom = 0 end_zoom", "30m resolution CONUS, 30m resolution \" \"SRTM between 60N and", "], [ { \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\":", "logger.error('main thread out') except Exception as e: logger.error(e) if 0:", "coding: utf-8 -*- # coding=utf-8 import json import os import", "511, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in", "min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng,", "511, \"endY\": 255, \"startX\": 0, \"startY\": 0 } ], [", "def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox,", "file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count =", "'1.1.0', 'access_token': access_token} while True: if requre_count > 4: break", "3, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "self.token = token def bbox2xyz(self, bbox, z): min_x, min_y =", "= os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with", "os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path,", "self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = []", "8191, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "metadatas = { \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI,", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 127,", "4095, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng,", "bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d =", "token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng,", "resp.json() return resp_json.get('accessToken') class 
TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self,", "chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e:", "None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL =", "f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception", "0 } ], [ { \"endX\": 4095, \"endY\": 2047, \"startX\":", "+ 1 def generate_metadata(self): try: metadatas = { \"attribution\": \"©", "], [ { \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\":", "\"startY\": 0 } ], [ { \"endX\": 8191, \"endY\": 4095,", "], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True)", "], \"description\": \"STK World Terrain Premium Tileset, v1.3. 10m -", "root_dir = '' def __init__(self, root_dir, bbox, token, thread_num, logger=None,", "f) except Exception as e: if self.logger is not None:", "start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng,", "\"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path =", "return -1 try: with open(file_path, 'wb') as f: for chunk", "0, \"startY\": 0 } ], [ { \"endX\": 1023, \"endY\":", "the European Union - EU-DEM layers\", \"available\": [ [ {", "90.0 start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat,", "self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox in", "= 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom)", "is None: return None resp_json = resp.json() return resp_json.get('accessToken') class", "} ], [ { \"endX\": 63, \"endY\": 31, \"startX\": 0,", "], [ { \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\":", "!= 200: continue break except Exception as e: resp =", "self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp =", "not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads =", "formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log')", "\"endY\": 2047, \"startX\": 0, \"startY\": 0 } ], [ {", "16383, \"startX\": 0, \"startY\": 0 } ] ], \"bounds\": [-180,", "\"endY\": 15, \"startX\": 0, \"startY\": 0 } ], [ {", "== '__main__': if 1: logger = logging.getLogger('down') try: root =", "timeout=2) if resp.status_code != 200: continue break except Exception as", "break except Exception as e: resp = None time.sleep(3) if", "write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token", "} ], [ { \"endX\": 3, \"endY\": 1, \"startX\": 0,", "resp.status_code != 200: continue break except Exception as e: resp", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 15,", "} ], [ { \"endX\": 15, \"endY\": 7, \"startX\": 0,", "z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y,", "logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng =", "write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox,", "Copernicus data and \" \"information funded by the European Union", "params=param, timeout=2) if resp.status_code != 200: continue break except Exception", "self.cut_bbox() for bbox 
in bboxs: _count = self.get_task_count(bbox) count +=", "with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as", "access_token = get_access_token(self.token) if access_token is None: return -1 param", "except Exception as e: if self.logger is not None: self.logger.exception(e)", "min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start()", "out') except Exception as e: logger.error(e) if 0: accessToken =", "], [ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\":", "15, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "if chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x,", "= token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat))", "'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w')", "= requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e:", "\"endY\": 63, \"startX\": 0, \"startY\": 0 } ], [ {", "\"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0 } ],", "\"startY\": 0 } ], [ { \"endX\": 1023, \"endY\": 511,", "[ { \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0", "break try: request_count += 1 param = {'access_token': token} resp", "= 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count >", "self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count", "e: if self.logger is not None: self.logger.error(e) if __name__ ==", "min_lng = -180.0 max_lng = 180.0 min_lat = -90.0 max_lat", "if resp.status_code != 200: continue break except Exception as e:" ]
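

# --- Illustrative sketch (not part of the original module) -------------------
# Both get_access_token() and TerrainDownloaderThread._download() above follow
# the same bounded-retry pattern: attempt a request a few times, sleep between
# failures, and give up cleanly. The standalone helper below restates that
# pattern in isolation; fetch_with_retries is a hypothetical name introduced
# here, and the token in the commented call is a placeholder, not a value from
# the source.
def fetch_with_retries(url, params, max_attempts=5, timeout=2, pause=3):
    """Return a successful requests.Response, or None after max_attempts."""
    for _ in range(max_attempts):
        try:
            resp = requests.get(url, params=params, timeout=timeout)
            if resp.status_code == 200:
                return resp
        except requests.RequestException:
            pass
        time.sleep(pause)
    return None

# Example usage (assumes a valid Cesium ion token; otherwise it returns None):
# resp = fetch_with_retries("https://api.cesium.com/v1/assets/1/endpoint",
#                           {"access_token": "YOUR_CESIUM_ION_TOKEN"})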
[ "2017 @author: dhingratul \"\"\" import urllib.request import os from selenium", "download after the timeout period, an exceptions is thrown, and", "1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return", "return driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\"", "context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response =", "bs4 import BeautifulSoup import ssl import requests import wget from", "import webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup", "PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is True:", "exceptions is thrown, and we try again except requests.exceptions.RequestException as", "0: flag = 0 else: flag = 1 return flag", "with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename =", "download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context", "flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url,", "is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try: with open(fn,", "-*- \"\"\" Created on Fri Nov 10 23:28:58 2017 @author:", "+ filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size ==", "try: with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages", "flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context)", "mdir + filename with open(filename, 'wb') as f: f.write(r.content) if", "\"\"\" try: with open(fn, 'rb') as f: pdf = PdfFileReader(f)", "from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if", "= urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir", "return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True:", "utf-8 -*- \"\"\" Created on Fri Nov 10 23:28:58 2017", "while True: # Keep trying until the webpage successfully downloads", "else: flag = 1 return flag def getDriver(url): driver =", "f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages >", "= mdir + filename file = open(filename, 'wb') file.write(response.read()) if", "open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0", "after the timeout period, an exceptions is thrown, and we", "'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else:", "ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0", "python3 # -*- coding: utf-8 -*- \"\"\" Created on Fri", "Select from bs4 import BeautifulSoup import ssl import requests import", "urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename, 'wb')", "filename) if os.stat(filename).st_size == 0: flag = 0 else: flag", "from bs4 import BeautifulSoup import ssl import requests import wget", "verify=False, timeout=10) break # If it downloads, get out and", "myfile.write(pdf_url + '\\n') filename = mdir + filename with open(filename,", "break # If it downloads, get out and get on", "as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n')", "True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: 
response", "trying until the webpage successfully downloads try: r = requests.get(pdf_url,", "open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename = mdir", "= mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if", "= 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename", "except requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url", "on with life # If it doesn't download after the", "If it downloads, get out and get on with life", "f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else:", "pdf.numPages return (numpages > 0) except Exception as e: return", "timeout period, an exceptions is thrown, and we try again", "@author: dhingratul \"\"\" import urllib.request import os from selenium import", "we try again except requests.exceptions.RequestException as e: with open(file_out, \"a\")", "filename = mdir + filename file = open(filename, 'wb') file.write(response.read())", "\"a\") as myfile: myfile.write(pdf_url + '\\n') filename = mdir +", "+ filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size ==", "period, an exceptions is thrown, and we try again except", "-*- coding: utf-8 -*- \"\"\" Created on Fri Nov 10", "= urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename,", "the timeout period, an exceptions is thrown, and we try", "flag = 1 file.close() return flag def download_file_R(pdf_url, mdir, filename,", "\"\"\" Created on Fri Nov 10 23:28:58 2017 @author: dhingratul", "from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl", "try again except requests.exceptions.RequestException as e: with open(file_out, \"a\") as", "import BeautifulSoup import ssl import requests import wget from PyPDF2", "webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import", "mdir + filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size", "\"\"\"Check is the PDF valid \"\"\" try: with open(fn, 'rb')", "open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages", "f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else: flag", "ssl import requests import wget from PyPDF2 import PdfFileReader def", "flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: #", "getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check", "pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages > 0)", "response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename =", "= 0 else: flag = 1 return flag def getDriver(url):", "urllib.request import os from selenium import webdriver from selenium.webdriver.support.ui import", "with life # If it doesn't download after the timeout", "flag = 1 return flag def getDriver(url): driver = webdriver.Chrome()", "import requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url,", "flag=False): if flag is True: context = ssl._create_unverified_context() response =", "mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until", "filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the", "with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages =", 
"and we try again except requests.exceptions.RequestException as e: with open(file_out,", "0: flag = 0 else: flag = 1 file.close() return", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on", "filename, flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context", "urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir +", "get out and get on with life # If it", "timeout=10) break # If it downloads, get out and get", "and get on with life # If it doesn't download", "file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while", "# If it downloads, get out and get on with", "get on with life # If it doesn't download after", "numpages = pdf.numPages return (numpages > 0) except Exception as", "'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag =", "context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename", "def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep", "file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag", "os from selenium import webdriver from selenium.webdriver.support.ui import Select from", "with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0:", "flag = 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False):", "driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is", "webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the PDF", "is the PDF valid \"\"\" try: with open(fn, 'rb') as", "os.stat(filename).st_size == 0: flag = 0 else: flag = 1", "file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else: flag", "an exceptions is thrown, and we try again except requests.exceptions.RequestException", "requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url +", "filename, flag=False): if flag is True: context = ssl._create_unverified_context() response", "r = requests.get(pdf_url, verify=False, timeout=10) break # If it downloads,", "filename = mdir + filename with open(filename, 'wb') as f:", "open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag", "it downloads, get out and get on with life #", "ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename", "downloads, get out and get on with life # If", "myfile: myfile.write(pdf_url + '\\n') filename = mdir + filename with", "== 0: flag = 0 else: flag = 1 return", "selenium import webdriver from selenium.webdriver.support.ui import Select from bs4 import", "= ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag =", "flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir +", "def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try: with", "1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings()", "== 0: flag = 0 else: flag = 1 file.close()", "filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size ==", "filename ssl._create_default_https_context = ssl._create_unverified_context 
wget.download(pdf_url, filename) if os.stat(filename).st_size == 0:", "again except requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile:", "# If it doesn't download after the timeout period, an", "PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag", "import ssl import requests import wget from PyPDF2 import PdfFileReader", "flag = 0 else: flag = 1 file.close() return flag", "0 else: flag = 1 file.close() return flag def download_file_R(pdf_url,", "def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn):", "as f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages", "Keep trying until the webpage successfully downloads try: r =", "Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import", "import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename,", "ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag", "valid \"\"\" try: with open(fn, 'rb') as f: pdf =", "BeautifulSoup import ssl import requests import wget from PyPDF2 import", "= webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the", "10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import os", "Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request", "= PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except", "flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def", "= open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag =", "23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import os from", "else: flag = 1 return flag def download_file_W(pdf_url, mdir, filename,", "try: r = requests.get(pdf_url, verify=False, timeout=10) break # If it", "0 else: flag = 1 return flag def getDriver(url): driver", "the webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10)", "download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying", "filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0:", "on Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import", "out and get on with life # If it doesn't", "import os from selenium import webdriver from selenium.webdriver.support.ui import Select", "= 1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url)", "the PDF valid \"\"\" try: with open(fn, 'rb') as f:", "wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0 else:", "# Keep trying until the webpage successfully downloads try: r", "+ filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size", "mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size", "return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver", "it doesn't download after the timeout period, an exceptions is", "return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir", "Created on Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\"", "doesn't download after the timeout period, an exceptions is thrown,", "def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename", "dhingratul \"\"\" 
import urllib.request import os from selenium import webdriver", "import Select from bs4 import BeautifulSoup import ssl import requests", "+ '\\n') filename = mdir + filename with open(filename, 'wb')", "is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else:", "else: flag = 1 file.close() return flag def download_file_R(pdf_url, mdir,", "requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage successfully", "filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename)", "else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename file", "coding: utf-8 -*- \"\"\" Created on Fri Nov 10 23:28:58", "selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl import", "from selenium import webdriver from selenium.webdriver.support.ui import Select from bs4", "webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break", "is thrown, and we try again except requests.exceptions.RequestException as e:", "1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename =", "flag = 0 else: flag = 1 return flag def", "= mdir + filename with open(filename, 'wb') as f: f.write(r.content)", "import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is", "return (numpages > 0) except Exception as e: return False", "True: # Keep trying until the webpage successfully downloads try:", "'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages return", "driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try:", "= 0 else: flag = 1 file.close() return flag def", "= ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url)", "PDF valid \"\"\" try: with open(fn, 'rb') as f: pdf", "<reponame>vahini01/electoral_rolls #!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created", "downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break # If", "= pdf.numPages return (numpages > 0) except Exception as e:", "e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename", "= 0 else: flag = 1 return flag def download_file_W(pdf_url,", "requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir,", "until the webpage successfully downloads try: r = requests.get(pdf_url, verify=False,", "mdir, filename, flag=False): if flag is True: context = ssl._create_unverified_context()", "file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage", "def download_file(pdf_url, mdir, filename, flag=False): if flag is True: context", "if flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url,", "mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context =", "thrown, and we try again except requests.exceptions.RequestException as e: with", "If it doesn't download after the timeout period, an exceptions", "# -*- coding: utf-8 -*- \"\"\" Created on Fri Nov", "'\\n') filename = mdir + filename with open(filename, 'wb') as", "= requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, get", "driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid", "PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except Exception", "import urllib.request 
import os from selenium import webdriver from selenium.webdriver.support.ui", "response = urllib.request.urlopen(pdf_url) filename = mdir + filename file =", "download_file(pdf_url, mdir, filename, flag=False): if flag is True: context =", "= 1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out):", "0 else: flag = 1 return flag def download_file_W(pdf_url, mdir,", "\"\"\" import urllib.request import os from selenium import webdriver from", "life # If it doesn't download after the timeout period,", "requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, get out", "wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False):", "successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break #", "if os.stat(filename).st_size == 0: flag = 0 else: flag =", "as myfile: myfile.write(pdf_url + '\\n') filename = mdir + filename", "as f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0" ]
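

# --- Illustrative sketch (not part of the original module) -------------------
# A hypothetical convenience wrapper showing how the helpers above are meant
# to be combined: fetch a PDF with the requests-based downloader, then keep it
# only if it parses as a readable PDF. The function name download_and_validate
# is introduced here for illustration only.
def download_and_validate(pdf_url, mdir, filename, file_out):
    """Download with download_file_R and keep the file only if it is a valid PDF."""
    flag = download_file_R(pdf_url, mdir, filename, file_out)
    path = mdir + filename
    if flag == 1 and is_valid_pdf(path):
        return True
    if os.path.exists(path):
        os.remove(path)  # discard empty or corrupt downloads
    return False

# Example usage (URL, directory and file names are placeholders):
# ok = download_and_validate("https://example.org/sample.pdf",
#                            "/tmp/rolls/", "sample.pdf", "failed_urls.txt")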
[ "the most similar trajectory to a given country? Plots similar", "= len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff", "= other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and {other_region}')", "for val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val)", "dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax =", "label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22,", "cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region]", "json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12,", "plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm =", "of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log')", "utils import data import os import sklearn import numpy as", "= open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items():", "as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') #", "= '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv')", "len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff =", "(days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region =", "cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed", "json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS -------------", "sys sys.path.insert(0, '..') from utils import data import os import", "NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES =", "cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed',", "os import sklearn import numpy as np import json import", "dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm", "Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '')", "<gh_stars>0 \"\"\" Experiment summary ------------------ Treat each province/state in a", "= fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions =", "in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111)", "= cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases')", "most similar trajectory to a given country? 
Plots similar countries", "plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for val", "[] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm =", "from utils import data import os import sklearn import numpy", "------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed)", "['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json')", "= plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for", "plt.title(f'Comparing confirmed cases in {region} and {other_region}') plt.savefig(f'results/raw_manhattan/{region}.png') plt.close() print(region)", "[] targets = [] fig = plt.figure(figsize=(12, 12)) ax =", "as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/'", "data import os import sklearn import numpy as np import", "plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------", "df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df)", "province/state in a country cases over time as a vector,", "K-Nearest Neighbor between countries. What country has the most similar", "data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases =", "a vector, do a simple K-Nearest Neighbor between countries. What", "\"\"\" Experiment summary ------------------ Treat each province/state in a country", "region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed", "time as a vector, do a simple K-Nearest Neighbor between", "over time as a vector, do a simple K-Nearest Neighbor", "trajectory to a given country? Plots similar countries \"\"\" import", "0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff", "features = [] targets = [] fig = plt.figure(figsize=(12, 12))", "= data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('#", "data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of", "import numpy as np import json import matplotlib.pyplot as plt", "other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and {other_region}') plt.savefig(f'results/raw_manhattan/{region}.png')", "'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets = []", "'') plt.title(f'Comparing confirmed cases in {region} and {other_region}') plt.savefig(f'results/raw_manhattan/{region}.png') plt.close()", "Neighbor between countries. What country has the most similar trajectory", "given country? Plots similar countries \"\"\" import sys sys.path.insert(0, '..')", "country? 
Plots similar countries \"\"\" import sys sys.path.insert(0, '..') from", "ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region", "'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f =", "cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time", "countries \"\"\" import sys sys.path.insert(0, '..') from utils import data", "import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH", "# ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed =", "simple K-Nearest Neighbor between countries. What country has the most", "data.load_csv_data(confirmed) features = [] targets = [] fig = plt.figure(figsize=(12,", "dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig", "labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val)", "dist['manhattan'][0] regions = [region, other_region] for val in regions: df", "Experiment summary ------------------ Treat each province/state in a country cases", "What country has the most similar trajectory to a given", "import data import os import sklearn import numpy as np", "import sklearn import numpy as np import json import matplotlib.pyplot", "= fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES =", "a country cases over time as a vector, do a", "= os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for", "Plots similar countries \"\"\" import sys sys.path.insert(0, '..') from utils", "f = open(dist_diff,) dist_diff = json.load(f) for region, dist in", "plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region =", "BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series',", "sys.path.insert(0, '..') from utils import data import os import sklearn", "= dist['manhattan'][0] regions = [region, other_region] for val in regions:", "------------------ Treat each province/state in a country cases over time", "LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff =", "'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region, dist", "= 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES)", "do a simple K-Nearest Neighbor between countries. What country has", "countries. 
What country has the most similar trajectory to a", "val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines =", "region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax", "'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f", "[region, other_region] for val in regions: df = data.filter_by_attribute( confirmed,", "for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12))", "similar countries \"\"\" import sys sys.path.insert(0, '..') from utils import", "os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = []", "import sys sys.path.insert(0, '..') from utils import data import os", "plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing", "to a given country? Plots similar countries \"\"\" import sys", "in a country cases over time as a vector, do", "regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels =", "= plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS", "= ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since", "22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region", "import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS", "= data.load_csv_data(confirmed) features = [] targets = [] fig =", "fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet')", "regions = [region, other_region] for val in regions: df =", "ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan", "ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES", "lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days", "cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout()", "matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH =", "country has the most similar trajectory to a given country?", "since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*',", "# ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed", "= region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases", "dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f)", "region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in", "12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0]", "fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid',", "country cases over time as a vector, do a simple", "= ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/',", "open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight')", "ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = 
dist['manhattan'][0] regions", "targets = [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111)", "2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region =", "------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed =", "similar trajectory to a given country? Plots similar countries \"\"\"", "as a vector, do a simple K-Nearest Neighbor between countries.", "vector, do a simple K-Nearest Neighbor between countries. What country", "between countries. What country has the most similar trajectory to", "NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,)", "os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region,", "= plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region", "val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases,", "= data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases", "'') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region}", "------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH,", "confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features", "12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0", "confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend()", "= [region, other_region] for val in regions: df = data.filter_by_attribute(", "Treat each province/state in a country cases over time as", "np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------", "ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*',", "other_region] for val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\",", "confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0)", "other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and", "a simple K-Nearest Neighbor between countries. 
What country has the", "in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels", "ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\")", "= json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig =", "= [] targets = [] fig = plt.figure(figsize=(12, 12)) ax", "cases over time as a vector, do a simple K-Nearest", "ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '')", "HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join(", "BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets", "\"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines", "confirmed = data.load_csv_data(confirmed) features = [] targets = [] fig", "'../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed", "= [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm", "plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS =", "= os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features =", "plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' #", "sklearn import numpy as np import json import matplotlib.pyplot as", "a given country? Plots similar countries \"\"\" import sys sys.path.insert(0,", "'..') from utils import data import os import sklearn import", "fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region,", "each province/state in a country cases over time as a", "plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES", "= plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted']", "\"\"\" import sys sys.path.insert(0, '..') from utils import data import", "has the most similar trajectory to a given country? Plots", "import os import sklearn import numpy as np import json", "cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases,", "numpy as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight')", "summary ------------------ Treat each province/state in a country cases over", "'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets =", "other_region = dist['manhattan'][0] regions = [region, other_region] for val in" ]
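

# --- Illustrative sketch (not part of the original script) -------------------
# The plotting loop above expects ../exp/results/knn_raw.json to map each
# country to its nearest neighbours per distance metric; dist['manhattan'][0]
# is the closest country under the Manhattan distance. The entries below are
# hypothetical and only document the shape this script assumes for that file.
example_dist_diff = {
    "Italy": {"manhattan": ["Spain"], "euclidean": ["Spain"]},
    "Canada": {"manhattan": ["Australia"], "euclidean": ["Australia"]},
}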
[ "context (currently not happening, since unclear, how and why helpful)", "not happening, since unclear, how and why helpful) # self.device", "information needs to be passed to # version D self.training", "* x^{m + 1}|) `C`: Q(x) = (0.1 + |b_0", "import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting", "x + b_1 * x^2 + ... + b_m *", "\"rational activation function version %s not implemented\" % version) def", "``(5, 4)`` cuda (bool): whether to execute on cuda device.", "Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of", "init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register", "Rational to use. Rational(x) = P(x)/Q(x), where P(x) = (a_0", "amount of weights in numerator and denominator, since we need", "degrees of the numerator (P) and denominator (Q). Default ``(5,", "+ 1}|) `C`: Q(x) = (0.1 + |b_0 + b_1", "x^2 + ... + a_n * x^n) and `A`: Q(x)", "self.training = trainable self.degrees = degrees self.version = version self.init_approximation", "%s not implemented\" % version) def hybrid_forward(self, F, x, numerator,", "Q(x) = (0.1 + |b_0 + b_1 * x +", "from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import", "numpy(self): \"\"\" Returns a numpy version of this activation function.", "'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func", "pass. Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def", "HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func", "with MXNET networks. \"\"\" import mxnet as mx from mxnet", "+ ... + b_m * x^m|) `D`: like `B` with", "cuda (bool): whether to execute on cuda device. NOTE: THIS", "trainable self.degrees = degrees self.version = version self.init_approximation = approx_func", "`C`: Q(x) = (0.1 + |b_0 + b_1 * x", "how and why helpful) # self.device = gpu() if cuda", "HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4),", "= (0.1 + |b_0 + b_1 * x + b_2", "this activation function. \"\"\" from rational.numpy import Rational as Rational_numpy", "of weights in numerator and denominator, since we need them", "+ a_2 * x^2 + ... + a_n * x^n)", "D self.training = trainable self.init_approximation = approx_func # set rational", "self.rational_func is None: raise ValueError( \"rational activation function version %s", "self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees", "activation function version self.rational_func = {'A': _version_a, 'B': _version_b, 'C':", "cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter", "from mxnet import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights", "parameter configuration from external files w_numerator, w_denominator = get_parameters( version,", "Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator =", "* x^m|) `D`: like `B` with noised coefficients b_i Default", "w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert w_numerator", "are available in `rational.rationals_config.json`. 
Default: ``leaky_relu`` degrees (tuple of int):", "= (a_0 + a_1 * x + a_2 * x^2", "are unable to retrieve them at later stages self.numerator_length =", "w_numerator and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator", "from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c,", "cuda else cpu() # register and configure weights (numerator and", "numpy version of this activation function. \"\"\" from rational.numpy import", "= Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator = self.denominator.data().asnumpy().tolist()", "+ a_n * x^n) and `A`: Q(x) = (1 +", "CUDA GPUS ARE USED WHEN IT IS POSSIBLE version (str):", "MXNET networks. \"\"\" import mxnet as mx from mxnet import", "`D`: like `B` with noised coefficients b_i Default ``A`` trainable", "register the amount of weights in numerator and denominator, since", "* x^2 + ... + b_m * x^m|) `D`: like", "= approx_func # set rational activation function version self.rational_func =", "= version self.init_approximation = approx_func # set specified context (currently", "* x^n) and `A`: Q(x) = (1 + |b_0 *", "(str): The name of the approximated function for initialisation. The", "the approximated function for initialisation. The different functions are available", "weights in numerator and denominator, since we need them during", "configuration from external files w_numerator, w_denominator = get_parameters( version, degrees,", "# read initial parameter configuration from external files w_numerator, w_denominator", "= P(x)/Q(x), where P(x) = (a_0 + a_1 * x", "(1 + |b_0 * x| + | b_1 * x^2|", "Rational(x) = P(x)/Q(x), where P(x) = (a_0 + a_1 *", "shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) #", "x| + | b_1 * x^2| + ... + |", "initial parameter configuration from external files w_numerator, w_denominator = get_parameters(", "Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The", "self.init_approximation = approx_func # set rational activation function version self.rational_func", "activation functions with MXNET networks. \"\"\" import mxnet as mx", "a_1 * x + a_2 * x^2 + ... +", "POSSIBLE version (str): Version of Rational to use. Rational(x) =", "ARE USED WHEN IT IS POSSIBLE version (str): Version of", "trainable self.init_approximation = approx_func # set rational activation function version", "with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if", "are trainable, i.e, if they are updated during backward pass.", "= mx.nd.array(w_denominator) # register the amount of weights in numerator", "P(x) = (a_0 + a_1 * x + a_2 *", "Rational Activation Functions for MXNET ======================================= This module allows you", "self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\"", "======================================= This module allows you to create Rational Neural Networks", "using Learnable Rational activation functions with MXNET networks. 
\"\"\" import", "trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator),", "of this activation function. \"\"\" from rational.numpy import Rational as", "x^{m + 1}|) `C`: Q(x) = (0.1 + |b_0 +", "retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length =", "_version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if", "a numpy version of this activation function. \"\"\" from rational.numpy", "... + b_m * x^{m + 1}|) `C`: Q(x) =", "need them during # symbolic execution, but are unable to", "MXNET ======================================= This module allows you to create Rational Neural", "`rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of int): The degrees of", "_version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base,", "we need them during # symbolic execution, but are unable", "= len(w_denominator) self.training = trainable self.degrees = degrees self.version =", "approx_func) # convert w_numerator and w_denominator to mxnet arrays w_numerator", "x^n) and `A`: Q(x) = (1 + |b_0 * x|", "+ |b_0 * x + b_1 * x^2 + ...", "they are updated during backward pass. Default ``True`` Returns: HybridBlock:", "# set rational activation function version self.rational_func = {'A': _version_a,", "degrees (tuple of int): The degrees of the numerator (P)", "during backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid block", "if trainable else 'null', differentiable=trainable) # register whether function is", "* x^2 + ... + b_m * x^{m + 1}|)", "(bool): Whether the weights are trainable, i.e, if they are", "(numerator and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),),", "# register whether function is trainable, since this information needs", "`B` with noised coefficients b_i Default ``A`` trainable (bool): Whether", "helpful) # self.device = gpu() if cuda else cpu() #", "and denominator (Q). Default ``(5, 4)`` cuda (bool): whether to", "WHEN IT IS POSSIBLE version (str): Version of Rational to", "passed to # version D self.training = trainable self.init_approximation =", "THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED", "differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable", "Q(x) = (1 + |b_0 * x + b_1 *", "(str): Version of Rational to use. Rational(x) = P(x)/Q(x), where", "= get_parameters( version, degrees, approx_func) # convert w_numerator and w_denominator", "to execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY", "x + b_2 * x^2 + ... + b_m *", "mxnet as mx from mxnet import initializer from mxnet.gluon import", "numerator (P) and denominator (Q). Default ``(5, 4)`` cuda (bool):", "stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable", "denominator, since we need them during # symbolic execution, but", "from rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees,", "set rational activation function version self.rational_func = {'A': _version_a, 'B':", "to # version D self.training = trainable self.init_approximation = approx_func", "use. 
Rational(x) = P(x)/Q(x), where P(x) = (a_0 + a_1", "a_2 * x^2 + ... + a_n * x^n) and", "Returns a numpy version of this activation function. \"\"\" from", "cpu() # register and configure weights (numerator and denominator coefficients)", "self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d}", "self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator = self.denominator.data().asnumpy().tolist() return rational_n", "else cpu() # register and configure weights (numerator and denominator", "implemented\" % version) def hybrid_forward(self, F, x, numerator, denominator): return", "rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version)", "_version_c, 'D': _version_d} \\ .get(version) if self.rational_func is None: raise", "Learnable Rational activation functions with MXNET networks. \"\"\" import mxnet", "networks. \"\"\" import mxnet as mx from mxnet import initializer", "... + a_n * x^n) and `A`: Q(x) = (1", "in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of int): The degrees", "version) def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x,", "during # symbolic execution, but are unable to retrieve them", "else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write'", "noised coefficients b_i Default ``A`` trainable (bool): Whether the weights", "_version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\"", "``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu',", "4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial", "happening, since unclear, how and why helpful) # self.device =", "Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name", "since we need them during # symbolic execution, but are", "Default ``A`` trainable (bool): Whether the weights are trainable, i.e,", "IT IS POSSIBLE version (str): Version of Rational to use.", "len(w_denominator) self.training = trainable self.degrees = degrees self.version = version", "register whether function is trainable, since this information needs to", "degrees self.version = version self.init_approximation = approx_func # set specified", "whether to execute on cuda device. NOTE: THIS PARAMETER IS", "from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions", "whether function is trainable, since this information needs to be", "Arguments: approx_func (str): The name of the approximated function for", "= (1 + |b_0 * x + b_1 * x^2", "len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees = degrees", "mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount of weights", "self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable", "* x^2 + ... + a_n * x^n) and `A`:", "... + | b_m * x^{m+1}|) `B`: Q(x) = (1", "of the approximated function for initialisation. 
The different functions are", "+ a_1 * x + a_2 * x^2 + ...", "# register the amount of weights in numerator and denominator,", "Neural Networks using Learnable Rational activation functions with MXNET networks.", "_version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func is", "GPUS ARE USED WHEN IT IS POSSIBLE version (str): Version", "get_parameters( version, degrees, approx_func) # convert w_numerator and w_denominator to", "raise ValueError( \"rational activation function version %s not implemented\" %", "Whether the weights are trainable, i.e, if they are updated", "Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator", "= len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees =", "function. \"\"\" from rational.numpy import Rational as Rational_numpy rational_n =", "self.version = version self.init_approximation = approx_func # set specified context", "where P(x) = (a_0 + a_1 * x + a_2", "b_m * x^{m+1}|) `B`: Q(x) = (1 + |b_0 *", "__init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs)", "Default: ``leaky_relu`` degrees (tuple of int): The degrees of the", "def numpy(self): \"\"\" Returns a numpy version of this activation", "are updated during backward pass. Default ``True`` Returns: HybridBlock: Rational", "# convert w_numerator and w_denominator to mxnet arrays w_numerator =", "if they are updated during backward pass. Default ``True`` Returns:", "x + a_2 * x^2 + ... + a_n *", "weights are trainable, i.e, if they are updated during backward", "w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator)", "IS POSSIBLE version (str): Version of Rational to use. Rational(x)", "rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function,", "denominator): return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def", "import mxnet as mx from mxnet import initializer from mxnet.gluon", "| b_1 * x^2| + ... + | b_m *", "Version of Rational to use. Rational(x) = P(x)/Q(x), where P(x)", "+ ... + b_m * x^{m + 1}|) `C`: Q(x)", "to retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length", "|b_0 + b_1 * x + b_2 * x^2 +", "The name of the approximated function for initialisation. 
The different", "symbolic execution, but are unable to retrieve them at later", "you to create Rational Neural Networks using Learnable Rational activation", "version self.init_approximation = approx_func # set specified context (currently not", "x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns", "later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training =", "|b_0 * x| + | b_1 * x^2| + ...", "mxnet import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import", "if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant(", "= degrees self.version = version self.init_approximation = approx_func # set", "this information needs to be passed to # version D", "= mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount of", "version of this activation function. \"\"\" from rational.numpy import Rational", "PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN", "from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the approximated", "IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT", "mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import", "if self.rational_func is None: raise ValueError( \"rational activation function version", "b_2 * x^2 + ... + b_m * x^m|) `D`:", "degrees, approx_func) # convert w_numerator and w_denominator to mxnet arrays", "w_denominator = get_parameters( version, degrees, approx_func) # convert w_numerator and", "None: raise ValueError( \"rational activation function version %s not implemented\"", "gpu() if cuda else cpu() # register and configure weights", "Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from", "for MXNET ======================================= This module allows you to create Rational", "* x + a_2 * x^2 + ... + a_n", "on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED.", "mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register", "approximated function for initialisation. The different functions are available in", "\\ .get(version) if self.rational_func is None: raise ValueError( \"rational activation", "read initial parameter configuration from external files w_numerator, w_denominator =", "+ b_m * x^m|) `D`: like `B` with noised coefficients", "super(Rational, self).__init__(**kwargs) # read initial parameter configuration from external files", "# self.device = gpu() if cuda else cpu() # register", "rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base", "* x + b_1 * x^2 + ... + b_m", "the numerator (P) and denominator (Q). Default ``(5, 4)`` cuda", "with noised coefficients b_i Default ``A`` trainable (bool): Whether the", "return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self):", "for initialisation. The different functions are available in `rational.rationals_config.json`. Default:", "ValueError( \"rational activation function version %s not implemented\" % version)", "``mxnet.gluon.HybridBlock``. 
Arguments: approx_func (str): The name of the approximated function", "available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of int): The", "{'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version)", "get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base", "function version self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c,", "as mx from mxnet import initializer from mxnet.gluon import HybridBlock", "to be passed to # version D self.training = trainable", "module allows you to create Rational Neural Networks using Learnable", "different functions are available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple", "NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE", "x^2| + ... + | b_m * x^{m+1}|) `B`: Q(x)", "= {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\", "`B`: Q(x) = (1 + |b_0 * x + b_1", "(1 + |b_0 * x + b_1 * x^2 +", "rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d", "\"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs):", "weights (numerator and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator',", "cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA", "name of the approximated function for initialisation. The different functions", "* x| + | b_1 * x^2| + ... +", "convert w_numerator and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator)", "# symbolic execution, but are unable to retrieve them at", "= gpu() if cuda else cpu() # register and configure", "needs to be passed to # version D self.training =", "version, degrees, approx_func) # convert w_numerator and w_denominator to mxnet", "is trainable, since this information needs to be passed to", "hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator,", "version %s not implemented\" % version) def hybrid_forward(self, F, x,", "<reponame>steven-lang/rational_activations \"\"\" Rational Activation Functions for MXNET ======================================= This module", "_version_d} \\ .get(version) if self.rational_func is None: raise ValueError( \"rational", "\"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str):", "files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert", "The different functions are available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees", "activation function. \"\"\" from rational.numpy import Rational as Rational_numpy rational_n", "**kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration from external", "... 
+ b_m * x^m|) `D`: like `B` with noised", "self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a numpy version of", "= (1 + |b_0 * x| + | b_1 *", "Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False,", "% version) def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F,", "init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator =", "Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self,", "rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator =", "approx_func # set rational activation function version self.rational_func = {'A':", "CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE version", "self.training = trainable self.init_approximation = approx_func # set rational activation", "to create Rational Neural Networks using Learnable Rational activation functions", "and configure weights (numerator and denominator coefficients) with self.name_scope(): self.numerator", "self).__init__(**kwargs) # read initial parameter configuration from external files w_numerator,", "CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS", "specified context (currently not happening, since unclear, how and why", "degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read", "= self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null',", "self.denominator_length = len(w_denominator) self.training = trainable self.degrees = degrees self.version", "from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation", "w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register whether", "(P) and denominator (Q). Default ``(5, 4)`` cuda (bool): whether", "function is trainable, since this information needs to be passed", "grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),),", "= trainable self.init_approximation = approx_func # set rational activation function", "updated during backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid", "create Rational Neural Networks using Learnable Rational activation functions with", "external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) #", "\"\"\" Returns a numpy version of this activation function. 
\"\"\"", "block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True,", "be passed to # version D self.training = trainable self.init_approximation", "+ | b_m * x^{m+1}|) `B`: Q(x) = (1 +", "approx_func # set specified context (currently not happening, since unclear,", "import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator", "USED WHEN IT IS POSSIBLE version (str): Version of Rational", "grad_req='write' if trainable else 'null', differentiable=trainable) # register whether function", "(a_0 + a_1 * x + a_2 * x^2 +", "coefficients b_i Default ``A`` trainable (bool): Whether the weights are", "Activation Functions for MXNET ======================================= This module allows you to", "x, numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length,", "like `B` with noised coefficients b_i Default ``A`` trainable (bool):", "in numerator and denominator, since we need them during #", "b_m * x^{m + 1}|) `C`: Q(x) = (0.1 +", "numerator and denominator, since we need them during # symbolic", "self.denominator_length) def numpy(self): \"\"\" Returns a numpy version of this", "to use. Rational(x) = P(x)/Q(x), where P(x) = (a_0 +", "at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training", "+ |b_0 * x| + | b_1 * x^2| +", "* x + b_2 * x^2 + ... + b_m", "``leaky_relu`` degrees (tuple of int): The degrees of the numerator", "and denominator, since we need them during # symbolic execution,", "function for initialisation. The different functions are available in `rational.rationals_config.json`.", "* x^2| + ... + | b_m * x^{m+1}|) `B`:", "why helpful) # self.device = gpu() if cuda else cpu()", "allows you to create Rational Neural Networks using Learnable Rational", "P(x)/Q(x), where P(x) = (a_0 + a_1 * x +", "differentiable=trainable) # register whether function is trainable, since this information", "trainable, since this information needs to be passed to #", "rational activation function version self.rational_func = {'A': _version_a, 'B': _version_b,", "Rational activation functions with MXNET networks. \"\"\" import mxnet as", "|b_0 * x + b_1 * x^2 + ... +", "b_m * x^m|) `D`: like `B` with noised coefficients b_i", "(0.1 + |b_0 + b_1 * x + b_2 *", "a_n * x^n) and `A`: Q(x) = (1 + |b_0", "i.e, if they are updated during backward pass. Default ``True``", "mx from mxnet import initializer from mxnet.gluon import HybridBlock from", "version (str): Version of Rational to use. Rational(x) = P(x)/Q(x),", "of int): The degrees of the numerator (P) and denominator", "+ ... + a_n * x^n) and `A`: Q(x) =", "+ b_1 * x + b_2 * x^2 + ...", "and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator =", "4)`` cuda (bool): whether to execute on cuda device. NOTE:", "1}|) `C`: Q(x) = (0.1 + |b_0 + b_1 *", "'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func is None:", "and `A`: Q(x) = (1 + |b_0 * x| +", "trainable, i.e, if they are updated during backward pass. Default", "initialisation. The different functions are available in `rational.rationals_config.json`. Default: ``leaky_relu``", "of Rational to use. 
Rational(x) = P(x)/Q(x), where P(x) =", "shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator", "inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the", "int): The degrees of the numerator (P) and denominator (Q).", "approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) #", "else 'null', differentiable=trainable) # register whether function is trainable, since", "w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator',", "version D self.training = trainable self.init_approximation = approx_func # set", "self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable)", "# set specified context (currently not happening, since unclear, how", "w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount", "device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS", "function version %s not implemented\" % version) def hybrid_forward(self, F,", "'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if", "self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable)", "b_i Default ``A`` trainable (bool): Whether the weights are trainable,", "import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class", "(currently not happening, since unclear, how and why helpful) #", "\"\"\" from rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation,", "(tuple of int): The degrees of the numerator (P) and", "(bool): whether to execute on cuda device. NOTE: THIS PARAMETER", "execution, but are unable to retrieve them at later stages", "coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write'", "+ b_1 * x^2 + ... + b_m * x^{m", "self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else", "import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from", "import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a,", "b_1 * x + b_2 * x^2 + ... +", "\"\"\" Rational Activation Functions for MXNET ======================================= This module allows", "def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational,", "the weights are trainable, i.e, if they are updated during", "trainable else 'null', differentiable=trainable) # register whether function is trainable,", "and why helpful) # self.device = gpu() if cuda else", "Functions for MXNET ======================================= This module allows you to create", "b_1 * x^2| + ... 
+ | b_m * x^{m+1}|)", "import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters", "from external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func)", "+ | b_1 * x^2| + ... + | b_m", "to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) #", "denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator),", "Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5,", "Default ``(5, 4)`` cuda (bool): whether to execute on cuda", "self.init_approximation = approx_func # set specified context (currently not happening,", "not implemented\" % version) def hybrid_forward(self, F, x, numerator, denominator):", "activation function version %s not implemented\" % version) def hybrid_forward(self,", "if cuda else cpu() # register and configure weights (numerator", "= approx_func # set specified context (currently not happening, since", "register and configure weights (numerator and denominator coefficients) with self.name_scope():", "hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A',", "self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else", "backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\"", "functions are available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of", "them at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator)", "functions with MXNET networks. \"\"\" import mxnet as mx from", "Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments:", "mx.nd.array(w_denominator) # register the amount of weights in numerator and", "Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator = self.denominator.data().asnumpy().tolist() return", "This module allows you to create Rational Neural Networks using", "HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b,", "since unclear, how and why helpful) # self.device = gpu()", "trainable (bool): Whether the weights are trainable, i.e, if they", "since this information needs to be passed to # version", "denominator (Q). Default ``(5, 4)`` cuda (bool): whether to execute", "arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the", "and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant(", "+ b_m * x^{m + 1}|) `C`: Q(x) = (0.1", "(Q). Default ``(5, 4)`` cuda (bool): whether to execute on", "version self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D':", "``A`` trainable (bool): Whether the weights are trainable, i.e, if", "Networks using Learnable Rational activation functions with MXNET networks. 
\"\"\"", "_version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational", "* x^{m+1}|) `B`: Q(x) = (1 + |b_0 * x", "class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``.", "is None: raise ValueError( \"rational activation function version %s not", "approx_func (str): The name of the approximated function for initialisation.", "execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT", "denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a numpy", "'null', differentiable=trainable) # register whether function is trainable, since this", "but are unable to retrieve them at later stages self.numerator_length", "| b_m * x^{m+1}|) `B`: Q(x) = (1 + |b_0", "trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration from", "+ ... + | b_m * x^{m+1}|) `B`: Q(x) =", "'D': _version_d} \\ .get(version) if self.rational_func is None: raise ValueError(", "x^2 + ... + b_m * x^{m + 1}|) `C`:", "= trainable self.degrees = degrees self.version = version self.init_approximation =", "`A`: Q(x) = (1 + |b_0 * x| + |", "\"\"\" import mxnet as mx from mxnet import initializer from", ".get(version) if self.rational_func is None: raise ValueError( \"rational activation function", "# register and configure weights (numerator and denominator coefficients) with", "self.device = gpu() if cuda else cpu() # register and", "# version D self.training = trainable self.init_approximation = approx_func #", "of the numerator (P) and denominator (Q). Default ``(5, 4)``", "set specified context (currently not happening, since unclear, how and", "+ |b_0 + b_1 * x + b_2 * x^2", "the amount of weights in numerator and denominator, since we", "unable to retrieve them at later stages self.numerator_length = len(w_numerator)", "them during # symbolic execution, but are unable to retrieve", "+ b_2 * x^2 + ... + b_m * x^m|)", "def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator,", "_version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock):", "NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE", "= self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null',", "b_1 * x^2 + ... + b_m * x^{m +", "x^2 + ... 
+ b_m * x^m|) `D`: like `B`", "x^{m+1}|) `B`: Q(x) = (1 + |b_0 * x +", "F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training,", "unclear, how and why helpful) # self.device = gpu() if", "numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length)", "numerator, denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a", "initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from", "self.degrees = degrees self.version = version self.init_approximation = approx_func #", "Q(x) = (1 + |b_0 * x| + | b_1", "version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration", "configure weights (numerator and denominator coefficients) with self.name_scope(): self.numerator =", "The degrees of the numerator (P) and denominator (Q). Default", "x^m|) `D`: like `B` with noised coefficients b_i Default ``A``", "Rational Neural Networks using Learnable Rational activation functions with MXNET", "self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a numpy version", "w_denominator = mx.nd.array(w_denominator) # register the amount of weights in", "as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist()" ]
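# Hedged usage sketch (not part of the module above; assumes the `rational`
# package and its config files are installed): the block drops into a Gluon
# network like any built-in activation, and its numerator/denominator
# coefficients are trained together with the surrounding layers.
import mxnet as mx
from mxnet.gluon import nn

net = nn.HybridSequential()
net.add(nn.Dense(16))
net.add(Rational(approx_func='leaky_relu', version='A'))
net.add(nn.Dense(1))
net.initialize()

x = mx.nd.random.uniform(shape=(4, 8))
y = net(x)  # forward pass through Dense -> Rational -> Dense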
[ "+ yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt()", ": https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n", "+ 1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py", "F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2 -", "xx + yy - 2 * torch.matmul(x, y.t()) dist =", "num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y):", "\"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x, 2, axis,", "2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def", "y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx", "1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def", "m).t() dist = xx + yy - 2 * torch.matmul(x,", "/ denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes", "2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist", "= F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2", "dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x,", "\"\"\"Utils for criterion.\"\"\" import torch import torch.nn.functional as F def", "torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy -", "torch import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\"", "= torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m,", "axis, keepdim=True).expand_as(x) + 1e-12 return num / denom # Source", "= dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\"", "m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m,", "= x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy", "as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x", "= F.normalize(y, dim=1) dist = 2 - 2 * torch.mm(x,", "* torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x,", "\"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y,", "yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return", "return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x,", "= x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12", "x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return", "y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y =", "keepdim=True).expand(m, m).t() dist = xx + yy - 2 *", "for criterion.\"\"\" import torch import torch.nn.functional as F def normalize(x,", "y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes", "import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num", "y = F.normalize(y, dim=1) dist = 2 - 2 *", "num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) +", "yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx +", "F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom", "x 
= F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist =", "dim=1) y = F.normalize(y, dim=1) dist = 2 - 2", "torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t()", "torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom", "# Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\"", "xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1,", "\"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx =", "denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num", "distance.\"\"\" m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1,", "= torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy", "def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0),", "- 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist", "torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y):", "euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0)", "keepdim=True).expand_as(x) + 1e-12 return num / denom # Source :", "criterion.\"\"\" import torch import torch.nn.functional as F def normalize(x, axis=-1):", "dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine", "= torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num /", "<filename>torchflare/criterion/utils.py<gh_stars>1-10 \"\"\"Utils for criterion.\"\"\" import torch import torch.nn.functional as F", "import torch import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs", "return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x =", "Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m,", "dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x", "y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x,", "x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy =", "L2-Norm.\"\"\" num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x)", "Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx = torch.pow(x,", "keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist =", "Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist", "dist = 2 - 2 * torch.mm(x, y.t()) return dist", "F.normalize(y, dim=1) dist = 2 - 2 * torch.mm(x, y.t())", "2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy - 2", "Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1)", "https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n =", "denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean", "dim=1) dist = 2 - 2 * torch.mm(x, y.t()) return", "2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom #", "def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1)", "axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x, 2,", "n) yy = torch.pow(x, 
2).sum(1, keepdim=True).expand(m, m).t() dist = xx", "dist = xx + yy - 2 * torch.matmul(x, y.t())", "def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom =", "torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num =", "normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x,", "= xx + yy - 2 * torch.matmul(x, y.t()) dist", "n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n)", "cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y" ]
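# Hedged usage sketch (not part of the module): both helpers return an
# (m, n) pairwise distance matrix. euclidean_dist should agree with
# torch.cdist up to the 1e-12 clamp used for numerical stability, and
# cosine_dist is 2 - 2 * cosine_similarity, so it lies in [0, 4].
if __name__ == "__main__":
    torch.manual_seed(0)
    x = torch.randn(5, 8)  # m = 5 embeddings of dim 8
    y = torch.randn(3, 8)  # n = 3 embeddings of dim 8

    d = euclidean_dist(x, y)
    assert d.shape == (5, 3)
    assert torch.allclose(d, torch.cdist(x, y), atol=1e-4)

    c = cosine_dist(x, y)
    assert c.shape == (5, 3)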
[ "sbahn_munich integration\"\"\" line_dict = { \"name\": \"S3\", \"color\": \"#333333\", \"text_color\":", "integration\"\"\" line_dict = { \"name\": \"S3\", \"color\": \"#333333\", \"text_color\": \"#444444\",", "\"\"\"Tests for the sbahn_munich integration\"\"\" line_dict = { \"name\": \"S3\",", "line_dict = { \"name\": \"S3\", \"color\": \"#333333\", \"text_color\": \"#444444\", }", "for the sbahn_munich integration\"\"\" line_dict = { \"name\": \"S3\", \"color\":", "the sbahn_munich integration\"\"\" line_dict = { \"name\": \"S3\", \"color\": \"#333333\"," ]
[ "flask import render_template from flask_login import login_required homestack = Blueprint(\"homestack\",", "python2.7 # -*- coding: latin-1 -*- from flask import Blueprint", "# -*- coding: latin-1 -*- from flask import Blueprint from", "coding: latin-1 -*- from flask import Blueprint from flask import", "from flask import current_app from flask import render_template from flask_login", "import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required", "current_app from flask import render_template from flask_login import login_required homestack", "flask import Blueprint from flask import current_app from flask import", "-*- from flask import Blueprint from flask import current_app from", "-*- coding: latin-1 -*- from flask import Blueprint from flask", "import render_template from flask_login import login_required homestack = Blueprint(\"homestack\", __name__,", "/usr/bin/env python2.7 # -*- coding: latin-1 -*- from flask import", "#! /usr/bin/env python2.7 # -*- coding: latin-1 -*- from flask", "render_template from flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\")", "import Blueprint from flask import current_app from flask import render_template", "import current_app from flask import render_template from flask_login import login_required", "latin-1 -*- from flask import Blueprint from flask import current_app", "from flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\",", "Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def home(): return render_template(\"homestack/home.html\")", "flask import current_app from flask import render_template from flask_login import", "<reponame>geudrik/hautomation #! /usr/bin/env python2.7 # -*- coding: latin-1 -*- from", "from flask import render_template from flask_login import login_required homestack =", "Blueprint from flask import current_app from flask import render_template from", "from flask import Blueprint from flask import current_app from flask", "flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"])", "login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def", "homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def home():", "= Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def home(): return" ]
[ "established on :py:class:`StripeModelForm` \"\"\" class Meta: model = Supporter fields", "urls_enabled' }), 'site_url': forms.TextInput(attrs={ 'data-bind': 'value: site_url, enable: urls_enabled' }),", "{ 'public': _('Your name and image will be displayed on", "a receipt'), 'logo_url': _(\"URL of your company's logo, images should", "you a receipt'), 'logo_url': _(\"URL of your company's logo, images", "clean up logo < $200\"\"\" dollars = self.cleaned_data['dollars'] if dollars", "Sustained Engineering', receipt_email=self.cleaned_data['email'] ) def save(self, commit=True): supporter = super(SupporterForm,", "receipt_email=self.cleaned_data['email'] ) def save(self, commit=True): supporter = super(SupporterForm, self).save(commit) if", "donating over $400 can specify a logo URL and site", "payment form, giving fields for credit card number, expiry, and", "save(self, commit=True): supporter = super(SupporterForm, self).save(commit) if commit and self.user", "The proper Knockout data bindings are established on :py:class:`StripeModelForm` \"\"\"", "less\"), 'dollars': _('Companies donating over $400 can specify a logo", "value: card_digits' }), } last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True) name =", "'dollars', 'logo_url', 'site_url', 'public', ) labels = { 'public': _('Make", "up form This extends the basic payment form, giving fields", "forms.TextInput(attrs={ 'data-bind': 'value: site_url, enable: urls_enabled' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind':", "last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True) name = forms.CharField(required=True) email = forms.CharField(required=True)", "= { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'logo_url': forms.TextInput(attrs={", "self).__init__(*args, **kwargs) def validate_stripe(self): \"\"\"Call stripe for payment (not ideal", "from readthedocs.payments.utils import stripe from .models import Supporter log =", "email = forms.CharField(required=True) def validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd',", "'data-bind': 'value: dollars' }), 'logo_url': forms.TextInput(attrs={ 'data-bind': 'value: logo_url, enable:", "if dollars < 200: self.cleaned_data['logo_url'] = None self.cleaned_data['site_url'] = None", "forms from django.conf import settings from django.utils.translation import ugettext_lazy as", "fields = ( 'last_4_digits', 'name', 'email', 'dollars', ) help_texts =", "StripeResourceMixin from readthedocs.payments.utils import stripe from .models import Supporter log", "class Meta: model = Supporter fields = ( 'last_4_digits', 'name',", "send you a receipt'), } widgets = { 'dollars': forms.HiddenInput(attrs={", "from django.conf import settings from django.utils.translation import ugettext_lazy as _", "{ 'public': _('Make this donation public'), } help_texts = {", "_(\"URL of your company's logo, images should be 300x300 pixels", "commit=True): supporter = super(SupporterForm, self).save(commit) if commit and self.user is", "supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm): \"\"\"Payment form for ethical ads This", "for RTD donations\"\"\" import logging from django import forms from", "class EthicalAdForm(StripeResourceMixin, StripeModelForm): \"\"\"Payment form for ethical ads This extends", "self.user supporter.save() return supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm): \"\"\"Payment form for", "100, currency='usd', 
source=self.cleaned_data['stripe_token'], description='Read the Docs Sponsorship Payment', receipt_email=self.cleaned_data['email'] )", "source=self.cleaned_data['stripe_token'], description='Read the Docs Sustained Engineering', receipt_email=self.cleaned_data['email'] ) def save(self,", "name = forms.CharField(required=True) email = forms.CharField(required=True) def validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars'])", "\"\"\"Payment form for ethical ads This extends the basic payment", "images should be 300x300 pixels or less\"), 'dollars': _('Companies donating", "def validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the", "* 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs Sponsorship Payment', receipt_email=self.cleaned_data['email']", "* 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs Sustained Engineering', receipt_email=self.cleaned_data['email']", "card_digits, value: card_digits' }), } last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True) name", "100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs Sustained Engineering', receipt_email=self.cleaned_data['email'] )", "None and self.user.is_authenticated(): supporter.user = self.user supporter.save() return supporter class", "form This extends the basic payment form, giving fields for", "and image will be displayed on the donation page'), 'email':", "and site link'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind':", "'logo_url': forms.TextInput(attrs={ 'data-bind': 'value: logo_url, enable: urls_enabled' }), 'site_url': forms.TextInput(attrs={", "ethical ads This extends the basic payment form, giving fields", "help_texts = { 'public': _('Your name and image will be", "Knockout data bindings are established on :py:class:`StripeModelForm` \"\"\" class Meta:", "= Supporter fields = ( 'last_4_digits', 'name', 'email', 'dollars', )", "or less\"), 'dollars': _('Companies donating over $400 can specify a", "Gravatar and so we can send you a receipt'), 'logo_url':", "site_url, enable: urls_enabled' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits, value:", "django import forms from django.conf import settings from django.utils.translation import", "here) and clean up logo < $200\"\"\" dollars = self.cleaned_data['dollars']", "{ 'email': _('Your email is used so we can send", "you a receipt'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind':", "used so we can send you a receipt'), } widgets", "donation page'), 'email': _('Your email is used for Gravatar and", "the Docs Sustained Engineering', receipt_email=self.cleaned_data['email'] ) def save(self, commit=True): supporter", "StripeModelForm): \"\"\"Donation support sign up form This extends the basic", "= logging.getLogger(__name__) class SupporterForm(StripeResourceMixin, StripeModelForm): \"\"\"Donation support sign up form", "'name', 'email', 'dollars', ) help_texts = { 'email': _('Your email", "giving fields for credit card number, expiry, and CVV. The", "credit card number, expiry, and CVV. 
The proper Knockout data", "'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit:", "\"\"\"Forms for RTD donations\"\"\" import logging from django import forms", "{ 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'logo_url': forms.TextInput(attrs={ 'data-bind':", "we can send you a receipt'), 'logo_url': _(\"URL of your", "'email': _('Your email is used so we can send you", "description='Read the Docs Sustained Engineering', receipt_email=self.cleaned_data['email'] ) def save(self, commit=True):", "= ( 'last_4_digits', 'name', 'email', 'dollars', ) help_texts = {", "basic payment form, giving fields for credit card number, expiry,", "will be displayed on the donation page'), 'email': _('Your email", "pixels or less\"), 'dollars': _('Companies donating over $400 can specify", "self.user = kwargs.pop('user') super(SupporterForm, self).__init__(*args, **kwargs) def validate_stripe(self): \"\"\"Call stripe", "from django.utils.translation import ugettext_lazy as _ from readthedocs.payments.forms import StripeModelForm,", "= { 'public': _('Your name and image will be displayed", "= None stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the", "the donation page'), 'email': _('Your email is used for Gravatar", "used for Gravatar and so we can send you a", "and self.user is not None and self.user.is_authenticated(): supporter.user = self.user", "company's logo, images should be 300x300 pixels or less\"), 'dollars':", "super(SupporterForm, self).__init__(*args, **kwargs) def validate_stripe(self): \"\"\"Call stripe for payment (not", "help_texts = { 'email': _('Your email is used so we", "forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits, value: card_digits' }), } last_4_digits =", "200: self.cleaned_data['logo_url'] = None self.cleaned_data['site_url'] = None stripe.Charge.create( amount=int(self.cleaned_data['dollars']) *", "Supporter log = logging.getLogger(__name__) class SupporterForm(StripeResourceMixin, StripeModelForm): \"\"\"Donation support sign", "we can send you a receipt'), } widgets = {", "= forms.CharField(required=True) email = forms.CharField(required=True) def __init__(self, *args, **kwargs): self.user", "forms.CharField(required=True) email = forms.CharField(required=True) def validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100,", "import forms from django.conf import settings from django.utils.translation import ugettext_lazy", "a receipt'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value:", "'last_4_digits', 'name', 'email', 'dollars', ) help_texts = { 'email': _('Your", "forms.CharField(widget=forms.HiddenInput(), required=True) name = forms.CharField(required=True) email = forms.CharField(required=True) def validate_stripe(self):", "sign up form This extends the basic payment form, giving", "settings from django.utils.translation import ugettext_lazy as _ from readthedocs.payments.forms import", "$200\"\"\" dollars = self.cleaned_data['dollars'] if dollars < 200: self.cleaned_data['logo_url'] =", "$400 can specify a logo URL and site link'), }", "card number, expiry, and CVV. 
The proper Knockout data bindings", "and clean up logo < $200\"\"\" dollars = self.cleaned_data['dollars'] if", "'value: dollars' }), 'logo_url': forms.TextInput(attrs={ 'data-bind': 'value: logo_url, enable: urls_enabled'", "} last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True) name = forms.CharField(required=True) email =", "dollars' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits, value: card_digits' }),", "'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'logo_url': forms.TextInput(attrs={ 'data-bind': 'value:", "dollars < 200: self.cleaned_data['logo_url'] = None self.cleaned_data['site_url'] = None stripe.Charge.create(", "fields = ( 'last_4_digits', 'name', 'email', 'dollars', 'logo_url', 'site_url', 'public',", "for ethical ads This extends the basic payment form, giving", "proper Knockout data bindings are established on :py:class:`StripeModelForm` \"\"\" class", "kwargs.pop('user') super(SupporterForm, self).__init__(*args, **kwargs) def validate_stripe(self): \"\"\"Call stripe for payment", "'public', ) labels = { 'public': _('Make this donation public'),", "is not None and self.user.is_authenticated(): supporter.user = self.user supporter.save() return", "donation public'), } help_texts = { 'public': _('Your name and", "is used for Gravatar and so we can send you", "image will be displayed on the donation page'), 'email': _('Your", "is used so we can send you a receipt'), }", "import StripeModelForm, StripeResourceMixin from readthedocs.payments.utils import stripe from .models import", "_('Your email is used so we can send you a", "as _ from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin from readthedocs.payments.utils import", "send you a receipt'), 'logo_url': _(\"URL of your company's logo,", "supporter = super(SupporterForm, self).save(commit) if commit and self.user is not", "def __init__(self, *args, **kwargs): self.user = kwargs.pop('user') super(SupporterForm, self).__init__(*args, **kwargs)", "**kwargs) def validate_stripe(self): \"\"\"Call stripe for payment (not ideal here)", "donations\"\"\" import logging from django import forms from django.conf import", "the basic payment form, giving fields for credit card number,", "labels = { 'public': _('Make this donation public'), } help_texts", "self.user.is_authenticated(): supporter.user = self.user supporter.save() return supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm):", "Meta: model = Supporter fields = ( 'last_4_digits', 'name', 'email',", "over $400 can specify a logo URL and site link'),", "supporter.user = self.user supporter.save() return supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm): \"\"\"Payment", "This extends the basic payment form, giving fields for credit", "form for ethical ads This extends the basic payment form,", "*args, **kwargs): self.user = kwargs.pop('user') super(SupporterForm, self).__init__(*args, **kwargs) def validate_stripe(self):", "stripe for payment (not ideal here) and clean up logo", "= kwargs.pop('user') super(SupporterForm, self).__init__(*args, **kwargs) def validate_stripe(self): \"\"\"Call stripe for", "( 'last_4_digits', 'name', 'email', 'dollars', 'logo_url', 'site_url', 'public', ) labels", "= { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'last_4_digits': forms.TextInput(attrs={", "amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the 
Docs Sustained Engineering',", "so we can send you a receipt'), 'logo_url': _(\"URL of", "urls_enabled' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits, value: card_digits' }),", "return supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm): \"\"\"Payment form for ethical ads", "= forms.CharField(required=True) def __init__(self, *args, **kwargs): self.user = kwargs.pop('user') super(SupporterForm,", "readthedocs.payments.utils import stripe from .models import Supporter log = logging.getLogger(__name__)", "email is used so we can send you a receipt'),", "'dollars': _('Companies donating over $400 can specify a logo URL", ":py:class:`StripeModelForm` \"\"\" class Meta: model = Supporter fields = (", "'site_url', 'public', ) labels = { 'public': _('Make this donation", "for Gravatar and so we can send you a receipt'),", "RTD donations\"\"\" import logging from django import forms from django.conf", "log = logging.getLogger(__name__) class SupporterForm(StripeResourceMixin, StripeModelForm): \"\"\"Donation support sign up", "'valueInit: card_digits, value: card_digits' }), } last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)", "import logging from django import forms from django.conf import settings", "ugettext_lazy as _ from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin from readthedocs.payments.utils", "required=True) name = forms.CharField(required=True) email = forms.CharField(required=True) def __init__(self, *args,", "( 'last_4_digits', 'name', 'email', 'dollars', ) help_texts = { 'email':", "forms.CharField(required=True) def validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read", "'email': _('Your email is used for Gravatar and so we", "receipt'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars'", "= forms.CharField(required=True) def validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'],", "forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits,", "can send you a receipt'), } widgets = { 'dollars':", "supporter.save() return supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm): \"\"\"Payment form for ethical", "logo, images should be 300x300 pixels or less\"), 'dollars': _('Companies", "stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs Sponsorship", "if commit and self.user is not None and self.user.is_authenticated(): supporter.user", "django.utils.translation import ugettext_lazy as _ from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin", "'data-bind': 'value: logo_url, enable: urls_enabled' }), 'site_url': forms.TextInput(attrs={ 'data-bind': 'value:", "logging.getLogger(__name__) class SupporterForm(StripeResourceMixin, StripeModelForm): \"\"\"Donation support sign up form This", "= ( 'last_4_digits', 'name', 'email', 'dollars', 'logo_url', 'site_url', 'public', )", "forms.TextInput(attrs={ 'data-bind': 'value: logo_url, enable: urls_enabled' }), 'site_url': forms.TextInput(attrs={ 'data-bind':", "for payment (not ideal here) and clean up logo <", "stripe from .models import Supporter log = logging.getLogger(__name__) class 
SupporterForm(StripeResourceMixin,", "forms.CharField(required=True) def __init__(self, *args, **kwargs): self.user = kwargs.pop('user') super(SupporterForm, self).__init__(*args,", "enable: urls_enabled' }), 'site_url': forms.TextInput(attrs={ 'data-bind': 'value: site_url, enable: urls_enabled'", "import Supporter log = logging.getLogger(__name__) class SupporterForm(StripeResourceMixin, StripeModelForm): \"\"\"Donation support", "data bindings are established on :py:class:`StripeModelForm` \"\"\" class Meta: model", "None self.cleaned_data['site_url'] = None stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'],", "and CVV. The proper Knockout data bindings are established on", "email is used for Gravatar and so we can send", "name = forms.CharField(required=True) email = forms.CharField(required=True) def __init__(self, *args, **kwargs):", "of your company's logo, images should be 300x300 pixels or", "validate_stripe(self): \"\"\"Call stripe for payment (not ideal here) and clean", "link'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars'", "validate_stripe(self): stripe.Charge.create( amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs", "amount=int(self.cleaned_data['dollars']) * 100, currency='usd', source=self.cleaned_data['stripe_token'], description='Read the Docs Sponsorship Payment',", "}), 'site_url': forms.TextInput(attrs={ 'data-bind': 'value: site_url, enable: urls_enabled' }), 'last_4_digits':", "'public': _('Make this donation public'), } help_texts = { 'public':", "logo URL and site link'), } widgets = { 'dollars':", "django.conf import settings from django.utils.translation import ugettext_lazy as _ from", "widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value: dollars' }), 'last_4_digits':", "}), 'logo_url': forms.TextInput(attrs={ 'data-bind': 'value: logo_url, enable: urls_enabled' }), 'site_url':", "def validate_stripe(self): \"\"\"Call stripe for payment (not ideal here) and", "SupporterForm(StripeResourceMixin, StripeModelForm): \"\"\"Donation support sign up form This extends the", "= self.user supporter.save() return supporter class EthicalAdForm(StripeResourceMixin, StripeModelForm): \"\"\"Payment form", "_('Companies donating over $400 can specify a logo URL and", "a logo URL and site link'), } widgets = {", "be displayed on the donation page'), 'email': _('Your email is", "'public': _('Your name and image will be displayed on the", "receipt'), 'logo_url': _(\"URL of your company's logo, images should be", "enable: urls_enabled' }), 'last_4_digits': forms.TextInput(attrs={ 'data-bind': 'valueInit: card_digits, value: card_digits'", "site link'), } widgets = { 'dollars': forms.HiddenInput(attrs={ 'data-bind': 'value:", "can send you a receipt'), 'logo_url': _(\"URL of your company's", "'name', 'email', 'dollars', 'logo_url', 'site_url', 'public', ) labels = {", "= self.cleaned_data['dollars'] if dollars < 200: self.cleaned_data['logo_url'] = None self.cleaned_data['site_url']", "= { 'email': _('Your email is used so we can", "forms.CharField(widget=forms.HiddenInput(), required=True) name = forms.CharField(required=True) email = forms.CharField(required=True) def __init__(self,", "specify a logo URL and site link'), } widgets =", "for credit card number, expiry, and CVV. 
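# Illustrative sketch (not part of the original module): one way to drive
# SupporterForm from a Django view, assuming StripeModelForm runs
# validate_stripe() as part of is_valid(). The view function name, template
# path, and 'donate_success' URL name below are hypothetical.
from django.shortcuts import redirect, render


def donate_sketch(request):
    # `user` is popped in SupporterForm.__init__ and attached on save()
    form = SupporterForm(request.POST or None, user=request.user)
    if request.method == 'POST' and form.is_valid():
        form.save()  # the card was charged during validation
        return redirect('donate_success')  # hypothetical URL name
    return render(request, 'donate/form.html', {'form': form})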
class EthicalAdForm(StripeResourceMixin, StripeModelForm):

    """Payment form for ethical ads

    This extends the basic payment form, giving fields for credit card
    number, expiry, and CVV. The proper Knockout data bindings are
    established on :py:class:`StripeModelForm`.
    """

    class Meta:
        model = Supporter
        fields = (
            'last_4_digits',
            'name',
            'email',
            'dollars',
        )
        help_texts = {
            'email': _('Your email is used so we can send you a receipt'),
        }
        widgets = {
            'dollars': forms.HiddenInput(attrs={
                'data-bind': 'value: dollars',
            }),
            'last_4_digits': forms.TextInput(attrs={
                'data-bind': 'valueInit: card_digits, value: card_digits',
            }),
        }

    last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)
    name = forms.CharField(required=True)
    email = forms.CharField(required=True)

    def validate_stripe(self):
        stripe.Charge.create(
            amount=int(self.cleaned_data['dollars']) * 100,
            currency='usd',
            source=self.cleaned_data['stripe_token'],
            description='Read the Docs Sponsorship Payment',
            receipt_email=self.cleaned_data['email'],
        )
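# Note on the `* 100` in both validate_stripe() methods above: Stripe takes
# charge amounts as integers in the smallest currency unit (cents for
# 'usd'), so the cleaned dollar value is converted before the API call.
# A quick check of that conversion:
assert int('150') * 100 == 15000  # a $150 donation is submitted as 15000 cents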
[ "self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def", "params = { 'q': symbol, 'type': typ, 'output': output, }", "(result[-1][1] == ','): result.pop() # fix single-quoted strings elif (tokid", "option\" \"vol\": \"the volume of options traded.\" } \"\"\" for", "date + \"C\" or \"P\" + price \"strike\": \"strike price", "date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy", "['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval", "\"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols)", "self.session.get(url, params=params) if response.status_code == 200: content_json = response.text data", "for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry']", "m, d): \"\"\" Returns date >>> expiration = {u'd': 1,", "for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col", "= json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return", "enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: # df[col]", "Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs):", "typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params =", "import COL, _get_dates, to_float, to_int import pandas as pd #from", "u'\"%s\"' % tokval # fix single-quoted strings elif (tokid ==", "{ 'y': date.year, 'm': date.month, 'd': date.day } return(d) def", "tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval, _,", "'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for", "if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']:", "== ','): result.pop() # fix single-quoted strings elif (tokid ==", "'d': 3} \"\"\" d = { 'y': date.year, 'm': date.month,", "unquoted strings if (tokid == token.NAME): if tokval not in", "us something about what country where the stock is traded.", "datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1)", "1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1)", "\"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df =", "json import token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns", "\"vol\": \"the volume of options traded.\" } \"\"\" for col", "col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] =", "'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url,", "def _get_content(self, url, params): #response = requests.get(url, params=params) response =", "option code. Basically, Stock Symbol + 7 if mini option", "are currently being held by others. 
See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price,", "\"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\":", "d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type':", "to_offset from six.moves import cStringIO as StringIO import logging import", "\"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict like", "= df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\",", "d_cols = { \"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\":", "import traceback import datetime import json import token, tokenize def", "'type': typ, 'output': output, } data = self._get_content(url, params) d", "\"e\": # I think this tells us something about what", "typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']): params", "change direction. \"chg\" = up, \"chr\" = down, \"chg\"? \"e\":", "single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval", "= typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0,", "'m': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle", "7 if mini option + date + \"C\" or \"P\"", "u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ]", "this option \"name\": I don't know. I have never seen", "']')): if (len(result) > 0) and (result[-1][1] == ','): result.pop()", "try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret =", "know. I have never seen a value for this \"oi\":", "\"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\"", "{} #d[\"options\"] = df #return(d) return(data) def _get_content(self, url, params):", "fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix expecting property", "token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"',", "response.text data = json_decode(content_json) return(data) if __name__ == \"__main__\": import", "date_to_ymd(date): \"\"\" Returns dict like {'y': ..., 'm': ..., 'd':", "lst = [] for typ in [u'puts', u'calls']: df_typ =", "enumerate(data['expirations']): params = { 'q': symbol, 'output': output, 'expy': expiration['y'],", "dict like {'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010,", "'}') or (tokval == ']')): if (len(result) > 0) and", "return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from", "being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\":", "or (tokval == ']')): if (len(result) > 0) and (result[-1][1]", "#!/usr/bin/env python # -*- coding: utf-8 -*- from .base import", "\"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\"", "= requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code ==", "('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret", "1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y,", "\"chg\"? 
\"e\": # I think this tells us something about", "params): #response = requests.get(url, params=params) response = self.session.get(url, params=params) if", "df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst,", "== token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace", "..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm':", "(\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove", "for this option\" \"vol\": \"the volume of options traded.\" }", "['Volume']: # df[col] = df[col].fillna(0) #d = {} #d[\"options\"] =", "lazy JSON - to fix expecting property name this function", "== '}') or (tokval == ']')): if (len(result) > 0)", "Basically, Stock Symbol + 7 if mini option + date", "#d[\"options\"] = df #return(d) return(data) def _get_content(self, url, params): #response", "ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to", "about what country where the stock is traded. \"OPRA\" means", "token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity',", "expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output': output,", "response.status_code == 200: content_json = response.text data = json_decode(content_json) return(data)", "Returns dict like {'y': ..., 'm': ..., 'd': ...} >>>", "tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except:", "\"\"\" Returns date >>> expiration = {u'd': 1, u'm': 12,", "_get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol,", "return(data) def _get_content(self, url, params): #response = requests.get(url, params=params) response", "1, 'd': 3} \"\"\" d = { 'y': date.year, 'm':", "200: content_json = response.text data = json_decode(content_json) return(data) if __name__", "df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ)", "ymd_to_date(**expiration) #for col in ['Volume']: # df[col] = df[col].fillna(0) #d", "expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params)", "tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid", "self._get_multi = self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All',", "from .base import DataReaderBase from ..tools import COL, _get_dates, to_float,", ">>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m,", "http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def", "- to fix expecting property name this function fixes the", "\"expiry\": expiration date for this option \"name\": I don't know.", "= pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df", "result = [] for tokid, tokval, _, _, _ in", "df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\":", "data = self._get_content(url, params) d = {} lst = []", "http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. 
Basically, Stock Symbol", "not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid =", "def date_to_ymd(date): \"\"\" Returns dict like {'y': ..., 'm': ...,", "'d': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1,", "**kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014',", "price for this option\" \"vol\": \"the volume of options traded.\"", "\"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\":", "'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval = u'\"%s\"'", "df[col] = df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col]", "= { 'q': symbol, 'type': typ, 'output': output, } data", "\"\"\" DataReader to fetch data from Google Finance Options see", "seen a value for this \"oi\": open interest. How many", "\"strike price for this option\" \"vol\": \"the volume of options", "'q': symbol, 'type': typ, 'output': output, } data = self._get_content(url,", "[] for tokid, tokval, _, _, _ in tokengen: #", "strings elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval =", "result.pop() # fix single-quoted strings elif (tokid == token.STRING): if", "\"s\": option code. Basically, Stock Symbol + 7 if mini", "df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration", "strings if (tokid == token.NAME): if tokval not in ['true',", "Returns date >>> expiration = {u'd': 1, u'm': 12, u'y':", "params=params) response = self.session.get(url, params=params) if response.status_code == 200: content_json", "of options traded.\" } \"\"\" for col in ['Ask', 'Bid',", "_, _, _ in tokengen: # fix unquoted strings if", "pandas.tseries.frequencies import to_offset from six.moves import cStringIO as StringIO import", "to fetch data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain", "{'y': 2010, 'm': 1, 'd': 3} \"\"\" d = {", "response = self.session.get(url, params=params) if response.status_code == 200: content_json =", "'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data", "[] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type']", "init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name, *args,", "datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\"", "class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google Finance", "-*- coding: utf-8 -*- from .base import DataReaderBase from ..tools", "= [] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ])", "u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif", "= pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\":", "= token.STRING tokval = u'\"%s\"' % tokval # fix single-quoted", "fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader", "(len(result) > 0) and (result[-1][1] == ','): result.pop() # fix", "country where the stock is traded. \"OPRA\" means USA. 
\"expiry\":", "Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self,", "if (tokid == token.NAME): if tokval not in ['true', 'false',", "currently being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last", "interest. How many of these are currently being held by", "\"\"\" for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']:", "return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except: json_string", "= tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval, _, _,", "0) and (result[-1][1] == ','): result.pop() # fix single-quoted strings", "price \"strike\": \"strike price for this option\" \"vol\": \"the volume", "axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\": \"Bid\", \"p\":", "price, last \"s\": option code. Basically, Stock Symbol + 7", "d = { 'y': date.year, 'm': date.month, 'd': date.day }", "'NaN']: tokid = token.STRING tokval = u'\"%s\"' % tokval #", "int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i]", ">>> expiration = {u'd': 1, u'm': 12, u'y': 2014} >>>", "these are currently being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\":", "df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id'])", "d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\":", "up, \"chr\" = down, \"chg\"? \"e\": # I think this", "tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') #", "in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING", "expecting property name this function fixes the json output from", "= { \"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\": \"Strike\",", "def ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration =", "typ, 'output': output, } data = self._get_content(url, params) d =", "self._get_content(url, params) d = {} lst = [] for typ", ">>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3}", "'y': date.year, 'm': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text):", "def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self,", "'-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval = u'\"%s\"' %", "# I think this tells us something about what country", "df #return(d) return(data) def _get_content(self, url, params): #response = requests.get(url,", "import DataReaderBase from ..tools import COL, _get_dates, to_float, to_int import", "data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in", "import datetime import json import token, tokenize def ymd_to_date(y, m,", "= json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch", "six.moves import cStringIO as StringIO import logging import traceback import", "data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols = {", "= response.text data = json_decode(content_json) return(data) if __name__ == \"__main__\":", "ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 
1) datetime.date(2014, 3,", "(tokid == token.OP) and ((tokval == '}') or (tokval ==", "this option\" \"vol\": \"the volume of options traded.\" } \"\"\"", "} \"\"\" for col in ['Ask', 'Bid', 'c', 'cp', 'Last',", "params=params) if response.status_code == 200: content_json = response.text data =", "url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ,", "(tokval == ']')): if (len(result) > 0) and (result[-1][1] ==", "params = { 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm':", "open interest. How many of these are currently being held", "traded.\" } \"\"\" for col in ['Ask', 'Bid', 'c', 'cp',", "date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\"", "name this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name", "for this \"oi\": open interest. How many of these are", "\"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\",", "pandas as pd #from pandas.tseries.frequencies import to_offset from six.moves import", "where the stock is traded. \"OPRA\" means USA. \"expiry\": expiration", "= self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json'))", "} df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\",", "> 0) and (result[-1][1] == ','): result.pop() # fix single-quoted", "+ price \"strike\": \"strike price for this option\" \"vol\": \"the", "token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns date >>>", "json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data", "#for col in ['Volume']: # df[col] = df[col].fillna(0) #d =", "\"cs\": change direction. \"chg\" = up, \"chr\" = down, \"chg\"?", "'\\\\\"') # remove invalid commas elif (tokid == token.OP) and", "from ..tools import COL, _get_dates, to_float, to_int import pandas as", "col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] =", "python # -*- coding: utf-8 -*- from .base import DataReaderBase", "for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col]", "df[col] = df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d)", "month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict like {'y': ...,", "DataReaderBase from ..tools import COL, _get_dates, to_float, to_int import pandas", "symbol, typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params", ">>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014,", "\"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction. 
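# A concrete (invented) example of the "lazy" JSON this module guards
# against -- unquoted keys, single-quoted strings, and a trailing comma --
# and what json_decode() recovers from it:
_lazy_sample = "{cid: 43231, s: 'AAPL150117C00110000', p: '1.08',}"
assert json_decode(_lazy_sample) == {
    'cid': 43231, 's': 'AAPL150117C00110000', 'p': '1.08'
}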
\"chg\" =", "return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12',", "\"the volume of options traded.\" } \"\"\" for col in", "...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd':", "data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\"", "tells us something about what country where the stock is", "*args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json',", "col in ['Volume']: # df[col] = df[col].fillna(0) #d = {}", "utf-8 -*- from .base import DataReaderBase from ..tools import COL,", "ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d))", "\"name\": I don't know. I have never seen a value", "#response = requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code", "{ \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\",", "expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for typ", "like {'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1,", "direction. \"chg\" = up, \"chr\" = down, \"chg\"? \"e\": #", "'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df", "\"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\":", "as StringIO import logging import traceback import datetime import json", "fix expecting property name this function fixes the json output", "fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen =", "\"oi\": open interest. How many of these are currently being", "i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in", "expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']:", "output, } data = self._get_content(url, params) d = {} lst", "'Last', 'Strike']: df[col] = df[col].map(to_float) for col in ['Volume', 'oi',", "expiration = {u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration)", "df = pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\",", "'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col", "tokid = token.STRING tokval = u'\"%s\"' % tokval # fix", "month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\" d", "USA. 
\"expiry\": expiration date for this option \"name\": I don't", "fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"):", "12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014,", "\"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df", "for i, expiration in enumerate(data['expirations']): params = { 'q': symbol,", "} return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to", "fetch data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api", "Handle lazy JSON - to fix expecting property name this", "invalid commas elif (tokid == token.OP) and ((tokval == '}')", "self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ])", "= u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas", "from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = []", "to fix expecting property name this function fixes the json", "== token.OP) and ((tokval == '}') or (tokval == ']')):", "= fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\"", "d = {} lst = [] for typ in [u'puts',", "ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google", "for this option \"name\": I don't know. I have never", "coding: utf-8 -*- from .base import DataReaderBase from ..tools import", "if response.status_code == 200: content_json = response.text data = json_decode(content_json)", "= json_decode(content_json) return(data) if __name__ == \"__main__\": import doctest doctest.testmod()", "% tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif (tokid", "= ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration)", "I don't know. I have never seen a value for", "= ymd_to_date(**expiration) #for col in ['Volume']: # df[col] = df[col].fillna(0)", "= u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result)", "the stock is traded. \"OPRA\" means USA. \"expiry\": expiration date", "= self._get_content(url, params) d = {} lst = [] for", "down, \"chg\"? \"e\": # I think this tells us something", "# fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith", "\"vol\": \"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols", "code\", \"cp\": \"cp\" \"cs\": change direction. 
\"chg\" = up, \"chr\"", "think this tells us something about what country where the", "**kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name,", "# remove invalid commas elif (tokid == token.OP) and ((tokval", "DataReader to fetch data from Google Finance Options see https://www.google.com/finance/option_chain", "df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id']", "import logging import traceback import datetime import json import token,", "commas elif (tokid == token.OP) and ((tokval == '}') or", "[u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del", "something about what country where the stock is traded. \"OPRA\"", "df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int)", "symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], }", "['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options']", "','): result.pop() # fix single-quoted strings elif (tokid == token.STRING):", "name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All',", "3} \"\"\" d = { 'y': date.year, 'm': date.month, 'd':", "tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid", "data[typ] for i, expiration in enumerate(data['expirations']): params = { 'q':", "Symbol + 7 if mini option + date + \"C\"", "# -*- coding: utf-8 -*- from .base import DataReaderBase from", "} data = self._get_content(url, params) for typ in [u'puts', u'calls']:", "Stock Symbol + 7 if mini option + date +", "'m': 1, 'd': 3} \"\"\" d = { 'y': date.year,", "_get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\"", "d): \"\"\" Returns date >>> expiration = {u'd': 1, u'm':", "tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except: json_string =", "\"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\":", "remove invalid commas elif (tokid == token.OP) and ((tokval ==", "\"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self,", "data = json_decode(content_json) return(data) if __name__ == \"__main__\": import doctest", "import cStringIO as StringIO import logging import traceback import datetime", ".base import DataReaderBase from ..tools import COL, _get_dates, to_float, to_int", "as pd #from pandas.tseries.frequencies import to_offset from six.moves import cStringIO", "(tokid == token.NAME): if tokval not in ['true', 'false', 'null',", "DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google Finance Options", "this \"oi\": open interest. How many of these are currently", "'output': output, } data = self._get_content(url, params) d = {}", "if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"')", "datetime import json import token, tokenize def ymd_to_date(y, m, d):", "a value for this \"oi\": open interest. 
How many of", "'m': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010,", "ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string)", "pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df =", "google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for", "\"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\"", "{'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3))", "options traded.\" } \"\"\" for col in ['Ask', 'Bid', 'c',", "tokenize def ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration", "http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid,", "params) d = {} lst = [] for typ in", "in enumerate(data['expirations']): params = { 'q': symbol, 'output': output, 'expy':", "= { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity", "and ((tokval == '}') or (tokval == ']')): if (len(result)", "\"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change", "to_int import pandas as pd #from pandas.tseries.frequencies import to_offset from", "tokval, _, _, _ in tokengen: # fix unquoted strings", "from six.moves import cStringIO as StringIO import logging import traceback", "return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict like {'y':", "= self.session.get(url, params=params) if response.status_code == 200: content_json = response.text", "12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\"", "cStringIO as StringIO import logging import traceback import datetime import", "\"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\":", "StringIO import logging import traceback import datetime import json import", "params) for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type']", "in tokengen: # fix unquoted strings if (tokid == token.NAME):", "= up, \"chr\" = down, \"chg\"? \"e\": # I think", "#d = {} #d[\"options\"] = df #return(d) return(data) def _get_content(self,", "typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ", "m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol,", "\"strike\": \"strike price for this option\" \"vol\": \"the volume of", "\"P\" + price \"strike\": \"strike price for this option\" \"vol\":", "= df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d) return(data)", "..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y':", "df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration in", "volume of options traded.\" } \"\"\" for col in ['Ask',", "for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] =", "json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret", "'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval =", "\"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" }", "\"identity code\", \"cp\": \"cp\" \"cs\": change direction. 
\"chg\" = up,", "\"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction. \"chg\"", "JSON - to fix expecting property name this function fixes", "= int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']):", "\"\"\" Handle lazy JSON - to fix expecting property name", "or \"P\" + price \"strike\": \"strike price for this option\"", "data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for", "if (len(result) > 0) and (result[-1][1] == ','): result.pop() #", "data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] =", "'\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret =", "= { 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'],", "(\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval))", "3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns", "\"\"\" d = { 'y': date.year, 'm': date.month, 'd': date.day", "in ['Volume']: # df[col] = df[col].fillna(0) #d = {} #d[\"options\"]", "except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class", "1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict", "tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval, _, _, _", "df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\":", "tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid,", "'Strike']: df[col] = df[col].map(to_float) for col in ['Volume', 'oi', 'cid']:", "fix unquoted strings if (tokid == token.NAME): if tokval not", "in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float)", "df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry']", "} data = self._get_content(url, params) d = {} lst =", "= {u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014,", "for tokid, tokval, _, _, _ in tokengen: # fix", "to_float, to_int import pandas as pd #from pandas.tseries.frequencies import to_offset", "ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\",", "\"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols =", "_get_content(self, url, params): #response = requests.get(url, params=params) response = self.session.get(url,", "{ \"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\":", "code. 
Basically, Stock Symbol + 7 if mini option +", "Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args,", "https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict", "u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3,", "data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: # df[col] =", "3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def", "2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1)", "and (result[-1][1] == ','): result.pop() # fix single-quoted strings elif", "stock is traded. \"OPRA\" means USA. \"expiry\": expiration date for", "tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return", "(tokid == token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' %", "= pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i,", "{ 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd':", "in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ)", "what country where the stock is traded. \"OPRA\" means USA.", "% tokval # fix single-quoted strings elif (tokid == token.STRING):", "['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for", "COL, _get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies", "see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi", "output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result =", "'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'],", "u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>>", "output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data =", "of these are currently being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp", "\"chg\" = up, \"chr\" = down, \"chg\"? \"e\": # I", "See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. Basically, Stock", "is traded. \"OPRA\" means USA. \"expiry\": expiration date for this", "in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry'])", "{} lst = [] for typ in [u'puts', u'calls']: df_typ", "lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']): params =", "ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration = {u'd':", "traded. \"OPRA\" means USA. 
\"expiry\": expiration date for this option", "= pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry'] =", "pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\": \"Bid\",", "date >>> expiration = {u'd': 1, u'm': 12, u'y': 2014}", "{ 'q': symbol, 'type': typ, 'output': output, } data =", "pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry'])", "def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url =", "pd #from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as", "json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result", "_, _ in tokengen: # fix unquoted strings if (tokid", "\"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ, 'output': output,", "How many of these are currently being held by others.", "\"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\",", "del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols =", "json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase):", "def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name,", "i, expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output':", "function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen", "import to_offset from six.moves import cStringIO as StringIO import logging", "% tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string):", "# df[col] = df[col].fillna(0) #d = {} #d[\"options\"] = df", "I have never seen a value for this \"oi\": open", "+ \"C\" or \"P\" + price \"strike\": \"strike price for", "df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for", "tokid, tokval, _, _, _ in tokengen: # fix unquoted", "('\"', '\\\\\"') # remove invalid commas elif (tokid == token.OP)", "== ']')): if (len(result) > 0) and (result[-1][1] == ','):", "from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def", "held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option", "'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col in ['Volume',", "ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for", "this tells us something about what country where the stock", "traceback import datetime import json import token, tokenize def ymd_to_date(y,", "in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: #", "'expd': expiration['d'], } data = self._get_content(url, params) for typ in", "import pandas as pd #from pandas.tseries.frequencies import to_offset from six.moves", "\"p\": price, last \"s\": option code. 
Basically, Stock Symbol +", "if mini option + date + \"C\" or \"P\" +", "value for this \"oi\": open interest. How many of these", "= self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ =", "tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif (tokid ==", "don't know. I have never seen a value for this", "date.year, 'm': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\"", "= u'\"%s\"' % tokval # fix single-quoted strings elif (tokid", "\"\"\" Returns dict like {'y': ..., 'm': ..., 'd': ...}", "\"C\" or \"P\" + price \"strike\": \"strike price for this", "token.OP) and ((tokval == '}') or (tokval == ']')): if", "\"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols = {", "token.STRING tokval = u'\"%s\"' % tokval # fix single-quoted strings", "day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\" d =", "y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q':", "'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'):", "2010, 'm': 1, 'd': 3} \"\"\" d = { 'y':", "= \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ, 'output':", "others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. Basically,", "lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\":", "never seen a value for this \"oi\": open interest. How", "\"cp\" \"cs\": change direction. \"chg\" = up, \"chr\" = down,", "content_json = response.text data = json_decode(content_json) return(data) if __name__ ==", "+ 7 if mini option + date + \"C\" or", "\"b\": \"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\",", "return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix", "url, params): #response = requests.get(url, params=params) response = self.session.get(url, params=params)", "# fix unquoted strings if (tokid == token.NAME): if tokval", "'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col in", "logging import traceback import datetime import json import token, tokenize", "def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix expecting", "https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi =", "..tools import COL, _get_dates, to_float, to_int import pandas as pd", "date for this option \"name\": I don't know. I have", "result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string)", "-*- from .base import DataReaderBase from ..tools import COL, _get_dates,", "json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret", "by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code.", "= [] for tokid, tokval, _, _, _ in tokengen:", "df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d) return(data) def", "= typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']):", "= df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] =", "= {} #d[\"options\"] = df #return(d) return(data) def _get_content(self, url,", "\"cp\": \"cp\" \"cs\": change direction. 
\"chg\" = up, \"chr\" =", "mini option + date + \"C\" or \"P\" + price", "date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON -", "= df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] =", "= df #return(d) return(data) def _get_content(self, url, params): #response =", "== token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity',", "#return(d) return(data) def _get_content(self, url, params): #response = requests.get(url, params=params)", "option \"name\": I don't know. I have never seen a", "many of these are currently being held by others. See,", "elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"'", "tokval = u'\"%s\"' % tokval # fix single-quoted strings elif", "= down, \"chg\"? \"e\": # I think this tells us", "symbol, 'type': typ, 'output': output, } data = self._get_content(url, params)", "= {} lst = [] for typ in [u'puts', u'calls']:", "output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params = {", "lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols", "pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration", "'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] =", "= { 'y': date.year, 'm': date.month, 'd': date.day } return(d)", "last \"s\": option code. Basically, Stock Symbol + 7 if", "tokval # fix single-quoted strings elif (tokid == token.STRING): if", "\"OPRA\" means USA. \"expiry\": expiration date for this option \"name\":", "requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code == 200:", "elif (tokid == token.OP) and ((tokval == '}') or (tokval", "'Infinity', 'NaN']: tokid = token.STRING tokval = u'\"%s\"' % tokval", "*args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name, *args, **kwargs):", "_get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies import", "\"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction.", "== 200: content_json = response.text data = json_decode(content_json) return(data) if", "expiration date for this option \"name\": I don't know. I", "option + date + \"C\" or \"P\" + price \"strike\":", "((tokval == '}') or (tokval == ']')): if (len(result) >", "\"chr\" = down, \"chg\"? \"e\": # I think this tells", "+ date + \"C\" or \"P\" + price \"strike\": \"strike", "{u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12,", "_ in tokengen: # fix unquoted strings if (tokid ==", "del data[typ] for i, expiration in enumerate(data['expirations']): params = {", "day=d)) def date_to_ymd(date): \"\"\" Returns dict like {'y': ..., 'm':", "\"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval,", "'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url", "expiration['d'], } data = self._get_content(url, params) for typ in [u'puts',", "1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date):", "tokengen: # fix unquoted strings if (tokid == token.NAME): if", "import json import token, tokenize def ymd_to_date(y, m, d): \"\"\"", "#from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as StringIO", "means USA. 
\"expiry\": expiration date for this option \"name\": I", "have never seen a value for this \"oi\": open interest.", "'d': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON", "property name this function fixes the json output from google", "this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\"", "import token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns date", "u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def", "I think this tells us something about what country where", "= df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i,", "typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True)", "the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline)", "data = self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ", "tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try:", "def json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string)" ]
# <reponame>Vail-qin/Keras-TextClassification
# !/usr/bin/python
# -*- coding: utf-8 -*-
# @time     : 2019/11/2 21:08
# @author   : Mo
# @function:

from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json
from keras_textclassification.conf.path_config import path_model_dir
path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json'
path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json'

import numpy as np
import os


class PreprocessGenerator:
    """
    Data preprocessing; the input is a CSV of [label,ques] rows.
    """
    def __init__(self):
        self.l2i_i2l = None
        if os.path.exists(path_fast_text_model_l2i_i2l):
            self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l)

    def prereocess_idx(self, pred):
        if os.path.exists(path_fast_text_model_l2i_i2l):
            pred_i2l = {}
            i2l = self.l2i_i2l['i2l']
            for i in range(len(pred)):
                pred_i2l[i2l[str(i)]] = pred[i]
            pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)]
            return pred_i2l_rank
        else:
            raise RuntimeError("path_fast_text_model_label2index is None")

    def prereocess_pred_xid(self, pred):
        if os.path.exists(path_fast_text_model_l2i_i2l):
            pred_l2i = {}
            l2i = self.l2i_i2l['l2i']
            for i in range(len(pred)):
                pred_l2i[pred[i]] = l2i[pred[i]]
            pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)]
            return pred_l2i_rank
        else:
            raise RuntimeError("path_fast_text_model_label2index is None")

    def preprocess_get_label_set(self, path):
        # first collect the label set, i.e. the classes that actually occur
        label_set = set()
        len_all = 0
        file_csv = open(path, "r", encoding="utf-8")
        for line in file_csv:
            len_all += 1
            if len_all > 1:  # the first line is the 'label,ques' header; skip it
                line_sp = line.split(",")
                label_org = str(line_sp[0]).strip().upper()
                label_real = "NAN" if label_org == "" else label_org
                label_set.add(label_real)
        file_csv.close()
        return label_set, len_all

    def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1):
        label_set, len_all = self.preprocess_get_label_set(path)
        # build the label<->index dicts; if label2index already exists
        # (e.g. when reading the dev set), don't rebuild it
        if not os.path.exists(path_fast_text_model_l2i_i2l):
            count = 0
            label2index = {}
            index2label = {}
            for label_one in label_set:
                label2index[label_one] = count
                index2label[count] = label_one
                count = count + 1
            l2i_i2l = {}
            l2i_i2l['l2i'] = label2index
            l2i_i2l['i2l'] = index2label
            save_json(l2i_i2l, path_fast_text_model_l2i_i2l)
        else:
            l2i_i2l = load_json(path_fast_text_model_l2i_i2l)

        # fraction of the data to read
        len_ql = int(rate * len_all)
        if len_ql <= 500:  # don't subsample small corpora; keep enough data to train
            len_ql = len_all

        def process_line(line):
            # per line: extract the label and the question's token indices
            line_sp = line.split(",")
            ques = str(line_sp[1]).strip().upper()
            label = str(line_sp[0]).strip().upper()
            label = "NAN" if label == "" else label
            que_embed = embed.sentence2idx(ques)
            label_zeros = [0] * len(l2i_i2l['l2i'])
            label_zeros[l2i_i2l['l2i'][label]] = 1
            return que_embed, label_zeros

        while True:
            file_csv = open(path, "r", encoding="utf-8")
            cout_all_line = 0
            cnt = 0
            x, y = [], []
            for line in file_csv:
                cout_all_line += 1
                # break out of the loop once the sampled share is read
                if len_ql < cout_all_line:
                    break
                if cout_all_line > 1:  # the first line is the 'label,ques' header; skip it
                    x_line, y_line = process_line(line)
                    x.append(x_line)
                    y.append(y_line)
                    cnt += 1
                    if cnt == batch_size:
                        if embedding_type in ['bert', 'albert']:
                            x_, y_ = np.array(x), np.array(y)
                            x_1 = np.array([x[0] for x in x_])
                            x_2 = np.array([x[1] for x in x_])
                            x_all = [x_1, x_2]
                        elif embedding_type == 'xlnet':
                            x_, y_ = x, np.array(y)
                            x_1 = np.array([x[0][0] for x in x_])
                            x_2 = np.array([x[1][0] for x in x_])
                            x_3 = np.array([x[2][0] for x in x_])
                            x_all = [x_1, x_2, x_3]
                        else:
                            x_all, y_ = np.array(x), np.array(y)
                        cnt = 0
                        yield (x_all, y_)
                        x, y = [], []
            file_csv.close()
            print("preprocess_label_ques_to_idx ok")
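A minimal wiring sketch for the generator. The names here are assumptions: `embed` and the CSV path come from the project's embedding classes and path_config in real use, and the resulting generator is what gets handed to Keras training loops such as fit_generator:

    pg = PreprocessGenerator()
    train_gen = pg.preprocess_label_ques_to_idx(
        embedding_type='random',   # any value other than 'bert'/'albert'/'xlnet'
        batch_size=32,
        path='data/train.csv',     # CSV whose first line is the 'label,ques' header
        embed=embed,               # must provide embed.sentence2idx(text)
        rate=1)
    x_batch, y_batch = next(train_gen)  # one (inputs, one-hot labels) batch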
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from gpu_tests.gpu_test_expectations import GpuTestExpectations

# See the GpuTestExpectations class for documentation.

class PixelExpectations(GpuTestExpectations):
  def SetExpectations(self):
    # Sample Usage:
    # self.Fail('Pixel_Canvas2DRedBox',
    #     ['mac', 'amd', ('nvidia', 0x1234)], bug=123)

    # Seems to be flaky on the new AMD R7 240 drivers.
    self.Flaky('Pixel_GpuRasterization_BlueBox',
        ['win', ('amd', 0x6613)], bug=653538)

    # Software compositing is not supported on Android; so we skip these
    # tests that disables gpu compositing on Android platforms.
    self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android'])
    self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android'])
    self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android'])
    self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android'])
    self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android'])

    self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'],
        bug=521588)

    # TODO(ccameron) fix these on Mac Retina
    self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690)

    # TODO(vmiura) check / generate reference images for Android devices
    self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256)

    self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker',
        ['mac', ('nvidia', 0xfe9)], bug=706016)
    self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277)

    # TODO(kbr): flakily timing out on this configuration.
    self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369)

    self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564)

    # Flaky for unknown reasons only on macOS. Not planning to investigate
    # further.
    self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'],
        bug=660461)

    self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker',
        ['win10', ('intel', 0x1912)], bug=690663)

    # TODO(zakerinasab): check / generate reference images.
    self.Fail('Pixel_Canvas2DUntagged', bug=713632)

    self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize',
        ['mac', 'linux', 'win', 'android'], bug=735228)
    self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize',
        ['mac', 'linux', 'win', 'android'], bug=735171)

    # TODO(junov): update reference images
    self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727)
    self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727)

    # TODO(dshwang): remove these after new reference images are generated.
    self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325)
    self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325)
    self.Fail('Pixel_Video_MP4', bug=615325)
    self.Fail('Pixel_Video_VP9', bug=615325)
self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'],", "# TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690)", "0xfe9)], bug=690277) # TODO(kbr): flakily timing out on this configuration.", "['android', 'nvidia'], bug=716564) # Flaky for unknown reasons only on", "further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) #", "bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after new", "after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325)", "'win', 'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'],", "2014 The Chromium Authors. All rights reserved. # Use of", "images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325) self.Fail('Pixel_Video_MP4', bug=615325) self.Fail('Pixel_Video_VP9',", "reasons only on macOS. Not planning to investigate # further.", "for unknown reasons only on macOS. Not planning to investigate", "that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker',", "bug=690277) # TODO(kbr): flakily timing out on this configuration. self.Flaky('*',", "new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325) self.Fail('Pixel_Video_MP4',", "self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on", "('nvidia', 0x1234)], bug=123) # Seems to be flaky on the", "'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown", "PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac',", "# TODO(vmiura) check / generate reference images for Android devices", "that can be # found in the LICENSE file. from", "TODO(kbr): flakily timing out on this configuration. self.Flaky('*', ['linux', 'intel',", "check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux',", "['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac',", "unknown reasons only on macOS. Not planning to investigate #", "Copyright 2014 The Chromium Authors. All rights reserved. 
# Use", "self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to", "these tests # that disables gpu compositing on Android platforms.", "is not supported on Android; so we skip these tests", "SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia',", "['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check", "update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) #", "drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing is", "bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing", "tests # that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D',", "configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564)", "reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker',", "the new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)],", "['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer',", "on Android; so we skip these tests # that disables", "('amd', 0x6613)], bug=653538) # Software compositing is not supported on", "AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) #", "macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461)", "remove these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325)", "0x6613)], bug=653538) # Software compositing is not supported on Android;", "on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android',", "self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing is not", "disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android'])", "on macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'],", "rights reserved. # Use of this source code is governed", "platforms. 
self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D',", "self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after new reference", "'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov):", "we skip these tests # that disables gpu compositing on", "['mac'], bug=533690) # TODO(vmiura) check / generate reference images for", "bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check /", "# found in the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations", "generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256)", "# Software compositing is not supported on Android; so we", "self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277)", "Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) #", "Chromium Authors. All rights reserved. # Use of this source", "self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate reference images", "GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample", "R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software", "Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate", "on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check /", "TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) #", "images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove", "be # found in the LICENSE file. from gpu_tests.gpu_test_expectations import", "out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4',", "240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing", "documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox',", "this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'],", "flaky on the new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win',", "self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab):", "images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac',", "this source code is governed by a BSD-style license that", "0x1234)], bug=123) # Seems to be flaky on the new", "on the new AMD R7 240 drivers. 
self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd',", "gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing',", "self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on Mac Retina", "bug=653538) # Software compositing is not supported on Android; so", "only on macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer',", "/ generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win',", "these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9',", "TODO(zakerinasab): check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac',", "['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to be flaky", "['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after", "TODO(dshwang): remove these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4',", "'nvidia'], bug=716564) # Flaky for unknown reasons only on macOS.", "file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class", "self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix", "('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr):", "self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac',", "# See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def", "# TODO(zakerinasab): check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize',", "flakily timing out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'],", "class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage:", "self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects',", "def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd',", "check / generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac',", "these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check", "compositing is not supported on Android; so we skip these", "fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura)", "in the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See", "# further. 
self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663)", "GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations):", "Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate reference", "bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown reasons", "source code is governed by a BSD-style license that can", "Authors. All rights reserved. # Use of this source code", "['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check / generate reference", "'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727)", "self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these", "bug=721727) # TODO(dshwang): remove these after new reference images are", "Android; so we skip these tests # that disables gpu", "# that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android'])", "can be # found in the LICENSE file. from gpu_tests.gpu_test_expectations", "self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown reasons only", "'amd', ('nvidia', 0x1234)], bug=123) # Seems to be flaky on", "# TODO(kbr): flakily timing out on this configuration. self.Flaky('*', ['linux',", "['mac'], bug=721727) # TODO(dshwang): remove these after new reference images", "'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia',", "# Seems to be flaky on the new AMD R7" ]
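# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the Chromium harness): the expectations
# API used above boils down to recording (outcome, test, conditions, bug)
# entries and matching them against the current platform/GPU configuration.
# The toy registry below is a hypothetical stand-in for GpuTestExpectations,
# shown only to make the Fail/Flaky/Skip pattern concrete; the real class in
# gpu_tests.gpu_test_expectations has a much richer matching model.
# ---------------------------------------------------------------------------
class _ToyExpectations(object):
  def __init__(self):
    self._entries = []  # list of (outcome, test_name, conditions, bug)

  def _record(self, outcome, test, conditions=None, bug=None):
    self._entries.append((outcome, test, tuple(conditions or ()), bug))

  def Fail(self, test, conditions=None, bug=None):
    self._record('fail', test, conditions, bug)

  def Flaky(self, test, conditions=None, bug=None):
    self._record('flaky', test, conditions, bug)

  def Skip(self, test, conditions=None, bug=None):
    self._record('skip', test, conditions, bug)

  def OutcomeFor(self, test, platform_tags):
    # First matching entry wins; '*' matches any test name. Tuple conditions
    # like ('nvidia', 0xfe9) encode vendor/device pairs in the real harness
    # and are ignored in this simplified matcher.
    for outcome, name, conditions, _bug in self._entries:
      if name in (test, '*') and all(
          c in platform_tags for c in conditions if isinstance(c, str)):
        return outcome
    return 'pass'

# e.g. after running SetExpectations against a _ToyExpectations instance,
# OutcomeFor('Pixel_CSS3DBlueBox', {'mac'}) would return 'fail'.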
[ "name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency',", "('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={", "('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent',", "False, }, ), migrations.AddField( model_name='budgetitem', name='upload', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'), ), ]", "by Django 1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals", "Generated by Django 1.11.13 on 2018-06-08 22:54 from __future__ import", "related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, },", "('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING',", "django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL),", "], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import", "'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'), ),", "= True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [", "migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True,", "('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)),", "models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel(", "to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload',", "default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by',", "'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces',", "__future__ import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from", "options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload', 
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'),", "from __future__ import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb", "models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')],", "on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False,", "max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True,", "utf-8 -*- # Generated by Django 1.11.13 on 2018-06-08 22:54", "to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ],", "coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-08", "models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2,", "('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)),", "'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')),", "models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id',", "('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ),", "models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number',", "import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True", "[ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year',", "to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ),", "related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),", "dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem',", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', 
models.IntegerField()), ('agency', models.TextField()), ('data_source',", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()),", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata',", "import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db", "-*- # Generated by Django 1.11.13 on 2018-06-08 22:54 from", "models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED',", "('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)),", "('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),", "('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True,", "'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at',", "models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)),", "name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at',", "models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ],", "models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies =", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial =", "('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)),", "initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations =", "primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category',", "] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models import 
django.db.models.deletion", "models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results',", "on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals from django.conf", "serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')),", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial", "class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ]", "('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "2018-06-08 22:54 from __future__ import unicode_literals from django.conf import settings", "models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem',", "import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion class", "True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel(", "models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status',", "], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)),", "('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)),", "from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import migrations,", "primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file',", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies", "Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations", "max_length=10)), 
('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True,", "('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)),", "django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'),", "'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,", "[ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id',", "max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload',", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()),", "'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING',", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)),", "models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter',", "('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2,", "serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()),", "('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()),", "import settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models import", "= [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at',", "models.BinaryField(null=True)), ('validation_results', 
django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'),", "('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by',", "-*- coding: utf-8 -*- # Generated by Django 1.11.13 on", "<filename>examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py # -*- coding: utf-8 -*- # Generated by Django", "= [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[", "# -*- coding: utf-8 -*- # Generated by Django 1.11.13", "('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED',", "django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "22:54 from __future__ import unicode_literals from django.conf import settings import", "Django 1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals from", "django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models", "verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw',", "('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField(", "('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[", "on 2018-06-08 22:54 from __future__ import unicode_literals from django.conf import", "models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract':", "models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'),", "# Generated by Django 1.11.13 on 2018-06-08 22:54 from __future__", "operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+',", "('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING',", "('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED',", "migrations.CreateModel( 
name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()),", "verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted',", "django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading')," ]
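# ---------------------------------------------------------------------------
# For orientation only: a hypothetical models.py that would generate the
# migration above. Field names and options are inferred from the operations;
# the actual upstream models may differ (the 'abstract': False option on
# Upload suggests it inherits from an abstract base model in the real code).
# ---------------------------------------------------------------------------
from django.conf import settings
from django.contrib.postgres.fields import JSONField
from django.db import models


class Upload(models.Model):
    STATUS_CHOICES = [
        ('LOADING', 'Loading'),
        ('PENDING', 'Pending'),
        ('STAGED', 'Staged'),
        ('INSERTED', 'Inserted'),
        ('DELETED', 'Deleted'),
    ]

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    file_metadata = JSONField(null=True)
    file = models.FileField(upload_to='')
    raw = models.BinaryField(null=True)
    validation_results = JSONField(null=True)
    status = models.CharField(choices=STATUS_CHOICES, default='LOADING',
                              max_length=10)
    status_changed_at = models.DateTimeField(null=True)
    # An Upload can supersede an earlier one; the reverse accessor
    # `replaced_by` walks the chain forward.
    replaces = models.ForeignKey('self', null=True, on_delete=models.CASCADE,
                                 related_name='replaced_by')
    status_changed_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True,
                                          on_delete=models.CASCADE,
                                          related_name='+')
    submitter = models.ForeignKey(settings.AUTH_USER_MODEL,
                                  on_delete=models.CASCADE)


class BudgetItem(models.Model):
    year = models.IntegerField()
    agency = models.TextField()
    data_source = models.TextField()
    category = models.TextField()
    dollars_budgeted = models.DecimalField(decimal_places=2, max_digits=14)
    dollars_spent = models.DecimalField(decimal_places=2, max_digits=14)
    row_number = models.IntegerField()
    upload = models.ForeignKey(Upload, on_delete=models.CASCADE)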
[ "based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(),", "#enables develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation", "description='PySVM : A NumPy implementation of SVM based on SMO", "License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn' ], url='https://github.com/Kaslanarian/PySVM', )", "setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation of SVM", ": A NumPy implementation of SVM based on SMO algorithm',", "implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT", "on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[", "develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation of", "of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License',", "packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn' ],", "name='pysvm', version='0.1', description='PySVM : A NumPy implementation of SVM based", "import setuptools #enables develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A", "license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn' ], url='https://github.com/Kaslanarian/PySVM',", "NumPy implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'],", "A NumPy implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\",", "SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖", "version='0.1', description='PySVM : A NumPy implementation of SVM based on", "setuptools #enables develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy", "SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md',", "algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy',", "author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn'" ]
[ "cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image, axis=0) # Perform the actual detection", "boxes and scores around the objects of interest in the", "[1, None, None, 3] # i.e. a single-column array, where", "results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60)", "Number of objects detected num_detections = detection_graph.get_tensor_by_name('num_detections:0') # Load image", "on an image. # It draws boxes and scores around", "# Path to label map file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') #", "Some of the code is copied from Google's example at", "any key to close the image cv2.waitKey(0) # Clean up", "the results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8,", "that is used # for object detection. PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb')", "np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) # All the results have", "the notebook is stored in the object_detection folder. sys.path.append(\"..\") #", "# Each score represents level of confidence for each of", "# i.e. a single-column array, where each item in the", "# It loads the classifier uses it to perform object", "os import cv2 import numpy as np import tensorflow as", "on the result image, together with the class label. detection_scores", "sys.path.append(\"..\") # Import utilites from utils import label_map_util from utils", "a part of the image where a particular object was", "cv2 import numpy as np import tensorflow as tf import", "os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to label map file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt')", "string labels would be fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories =", "dimensions to have shape: [1, None, None, 3] # i.e.", "os.getcwd() # Path to frozen detection graph .pb file, which", "OpenCV and # expand image dimensions to have shape: [1,", "## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I changed it to make it", "image where a particular object was detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')", "for the object detection classifier # Input tensor is the", "detection. # It loads the classifier uses it to perform", "loads the classifier uses it to perform object detection on", "Path to frozen detection graph .pb file, which contains the", "at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some is copied from Dat", "= detection_graph.get_tensor_by_name('detection_boxes:0') # Each score represents level of confidence for", "the image where a particular object was detected detection_boxes =", "the classifier uses it to perform object detection on an", "RGB value image = cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image, axis=0) #", "image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors are the detection", "the objects. 
# The score is shown on the result", "the object detection classifier # Input tensor is the image", "column has the pixel RGB value image = cv2.imread(PATH_TO_IMAGE) image_expanded", "It loads the classifier uses it to perform object detection", "detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') # Number of objects detected num_detections", "to make it more understandable to me. # Import packages", "IMAGE_NAME = 'test1.jpg' # Grab path to current working directory", "axis=0) # Perform the actual detection by running the model", "serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess = tf.Session(graph=detection_graph) #", "is stored in the object_detection folder. sys.path.append(\"..\") # Import utilites", "image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors are the detection boxes,", "= 'inference_graph' IMAGE_NAME = 'test1.jpg' # Grab path to current", "when our convolution # network predicts `5`, we know that", "some is copied from Dat Tran's example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py", "tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='')", "the actual detection by running the model with the image", "current working directory CWD_PATH = os.getcwd() # Path to frozen", "memory. detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with", "num) = sess.run( [detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_expanded}) #", "Tran's example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I changed it", "box represents a part of the image where a particular", "vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) #", "and classes # Each box represents a part of the", "This program uses a TensorFlow-trained classifier to perform object detection.", "to me. # Import packages import os import cv2 import", "by running the model with the image as input (boxes,", "Google's example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some is copied", "the results of the detection (aka 'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array(", "display the image. cv2.imshow('Object detector', image) # Press any key", "the image as input (boxes, scores, classes, num) = sess.run(", "# Press any key to close the image cv2.waitKey(0) #", "tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as", "detector can identify NUM_CLASSES = 6 # Load the label", "stored in the object_detection folder. sys.path.append(\"..\") # Import utilites from", "an image. 
# It draws boxes and scores around the", "numpy as np import tensorflow as tf import sys #", "labels would be fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map,", "fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess = tf.Session(graph=detection_graph) # Define input", "interest in the image. ## Some of the code is", "that returns a # dictionary mapping integers to appropriate string", "# Label maps map indices to category names, so that", "changed it to make it more understandable to me. #", "particular object was detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') # Each score", "frozen detection graph .pb file, which contains the model that", "Output tensors are the detection boxes, scores, and classes #", "represents level of confidence for each of the objects. #", "Load image using OpenCV and # expand image dimensions to", "expand image dimensions to have shape: [1, None, None, 3]", "os.path.join(CWD_PATH,IMAGE_NAME) # Number of classes the object detector can identify", "label map file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to image", "# Define input and output tensors (i.e. data) for the", "name='') sess = tf.Session(graph=detection_graph) # Define input and output tensors", "to frozen detection graph .pb file, which contains the model", "classes # Each box represents a part of the image", "objects detected num_detections = detection_graph.get_tensor_by_name('num_detections:0') # Load image using OpenCV", "of interest in the image. ## Some of the code", "= 'test1.jpg' # Grab path to current working directory CWD_PATH", "image. ## Some of the code is copied from Google's", "utils import visualization_utils as vis_util # Name of the directory", "categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index = label_map_util.create_category_index(categories) # Load", "a # dictionary mapping integers to appropriate string labels would", "objects of interest in the image. ## Some of the", "# Number of objects detected num_detections = detection_graph.get_tensor_by_name('num_detections:0') # Load", "image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) # Number of classes the object", "and some is copied from Dat Tran's example at ##", "= tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb')", "around the objects of interest in the image. ## Some", "PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) # Number of classes the object detector", "# Path to image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) # Number of", "as tf import sys # This is needed since the", "model that is used # for object detection. PATH_TO_CKPT =", "vis_util # Name of the directory containing the object detection", "6 # Load the label map. # Label maps map", "Description: # This program uses a TensorFlow-trained classifier to perform", "image, together with the class label. detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes", "functions, but anything that returns a # dictionary mapping integers", "each item in the column has the pixel RGB value", "num_detections], feed_dict={image_tensor: image_expanded}) # Draw the results of the detection", "the results have been drawn on image. 
Now display the", "to have shape: [1, None, None, 3] # i.e. a", "label. detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') # Number of", "mapping integers to appropriate string labels would be fine label_map", "# It draws boxes and scores around the objects of", "Detection Using Tensorflow-trained Classifier ######### # # Author: <NAME> #", "## but I changed it to make it more understandable", "copied from Dat Tran's example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but", "path to current working directory CWD_PATH = os.getcwd() # Path", "= label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index = label_map_util.create_category_index(categories)", "[detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_expanded}) # Draw the results", "together with the class label. detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes =", "model with the image as input (boxes, scores, classes, num)", "the image. cv2.imshow('Object detector', image) # Press any key to", "tensors are the detection boxes, scores, and classes # Each", "where a particular object was detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') #", "detection boxes, scores, and classes # Each box represents a", "MODEL_NAME = 'inference_graph' IMAGE_NAME = 'test1.jpg' # Grab path to", "utility functions, but anything that returns a # dictionary mapping", "fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess = tf.Session(graph=detection_graph)", "to perform object detection. # It loads the classifier uses", "# Load image using OpenCV and # expand image dimensions", "and output tensors (i.e. data) for the object detection classifier", "of the objects. # The score is shown on the", "`king`. # Here we use internal utility functions, but anything", "working directory CWD_PATH = os.getcwd() # Path to frozen detection", "to image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) # Number of classes the", "np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) # All the", "# Output tensors are the detection boxes, scores, and classes", "= detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') # Number of objects detected", "as np import tensorflow as tf import sys # This", "detection on an image. # It draws boxes and scores", "Tensorflow model into memory. detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def", "import label_map_util from utils import visualization_utils as vis_util # Name", "which contains the model that is used # for object", "more understandable to me. 
# Import packages import os import", "https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some is copied from Dat Tran's example", "file, which contains the model that is used # for", "= tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: serialized_graph = fid.read()", "tensor is the image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors", "needed since the notebook is stored in the object_detection folder.", "Draw the results of the detection (aka 'visulaize the results')", "Image Object Detection Using Tensorflow-trained Classifier ######### # # Author:", "folder. sys.path.append(\"..\") # Import utilites from utils import label_map_util from", "detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') # Number of objects", "tensors (i.e. data) for the object detection classifier # Input", "is the image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors are", "category names, so that when our convolution # network predicts", "containing the object detection module we're using MODEL_NAME = 'inference_graph'", "used # for object detection. PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path", "PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to label map file PATH_TO_LABELS", "the objects of interest in the image. ## Some of", "dictionary mapping integers to appropriate string labels would be fine", "be fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)", "it more understandable to me. # Import packages import os", "to appropriate string labels would be fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS)", "# The score is shown on the result image, together", "actual detection by running the model with the image as", "for each of the objects. # The score is shown", "network predicts `5`, we know that this corresponds to `king`.", "classifier uses it to perform object detection on an image.", "are the detection boxes, scores, and classes # Each box", "with the image as input (boxes, scores, classes, num) =", "'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True,", "represents a part of the image where a particular object", "of confidence for each of the objects. # The score", "directory CWD_PATH = os.getcwd() # Path to frozen detection graph", "item in the column has the pixel RGB value image", "(i.e. data) for the object detection classifier # Input tensor", "score represents level of confidence for each of the objects.", "shape: [1, None, None, 3] # i.e. a single-column array,", "Number of classes the object detector can identify NUM_CLASSES =", "<NAME> # Date: 1/15/18 # Description: # This program uses", "value image = cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image, axis=0) # Perform", "detection. 
PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to label map file", "import os import cv2 import numpy as np import tensorflow", "category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) # All the results have been", "label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index = label_map_util.create_category_index(categories) #", "it to perform object detection on an image. # It", "The score is shown on the result image, together with", "perform object detection. # It loads the classifier uses it", "been drawn on image. Now display the image. cv2.imshow('Object detector',", "the model that is used # for object detection. PATH_TO_CKPT", "was detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') # Each score represents level", "# Input tensor is the image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') #", "have shape: [1, None, None, 3] # i.e. a single-column", "detector', image) # Press any key to close the image", "Load the label map. # Label maps map indices to", "the detection (aka 'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32),", "Each box represents a part of the image where a", "of classes the object detector can identify NUM_CLASSES = 6", "for object detection. PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to label", "detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') # Number of objects detected num_detections =", "# Each box represents a part of the image where", "model into memory. detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def =", "since the notebook is stored in the object_detection folder. sys.path.append(\"..\")", "Press any key to close the image cv2.waitKey(0) # Clean", "internal utility functions, but anything that returns a # dictionary", "and # expand image dimensions to have shape: [1, None,", "but I changed it to make it more understandable to", "= label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index = label_map_util.create_category_index(categories) # Load the", "Date: 1/15/18 # Description: # This program uses a TensorFlow-trained", "Label maps map indices to category names, so that when", "sess.run( [detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_expanded}) # Draw the", "# Perform the actual detection by running the model with", "# Load the label map. # Label maps map indices", "scores, classes, num) = sess.run( [detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor:", "as vis_util # Name of the directory containing the object", "classes the object detector can identify NUM_CLASSES = 6 #", "= detection_graph.get_tensor_by_name('detection_classes:0') # Number of objects detected num_detections = detection_graph.get_tensor_by_name('num_detections:0')", "object detection module we're using MODEL_NAME = 'inference_graph' IMAGE_NAME =", "have been drawn on image. Now display the image. cv2.imshow('Object", "to perform object detection on an image. 
# It draws", "# # Author: <NAME> # Date: 1/15/18 # Description: #", "PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME)", "with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def,", "drawn on image. Now display the image. cv2.imshow('Object detector', image)", "the pixel RGB value image = cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image,", "a particular object was detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') # Each", "uses a TensorFlow-trained classifier to perform object detection. # It", "detection_graph.get_tensor_by_name('detection_classes:0') # Number of objects detected num_detections = detection_graph.get_tensor_by_name('num_detections:0') #", "the object detector can identify NUM_CLASSES = 6 # Load", "label_map_util from utils import visualization_utils as vis_util # Name of", "i.e. a single-column array, where each item in the column", "# Path to frozen detection graph .pb file, which contains", "object detection. # It loads the classifier uses it to", "use internal utility functions, but anything that returns a #", "= label_map_util.create_category_index(categories) # Load the Tensorflow model into memory. detection_graph", "image = cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image, axis=0) # Perform the", "key to close the image cv2.waitKey(0) # Clean up cv2.destroyAllWindows()", "would be fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES,", "os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) # Number", "our convolution # network predicts `5`, we know that this", "I changed it to make it more understandable to me.", "boxes, scores, and classes # Each box represents a part", "that this corresponds to `king`. # Here we use internal", "uses it to perform object detection on an image. #", "# Grab path to current working directory CWD_PATH = os.getcwd()", "np import tensorflow as tf import sys # This is", "is used # for object detection. PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') #", "label map. # Label maps map indices to category names,", "Load the Tensorflow model into memory. detection_graph = tf.Graph() with", "the class label. detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') #", "is needed since the notebook is stored in the object_detection", "(boxes, scores, classes, num) = sess.run( [detection_boxes, detection_scores, detection_classes, num_detections],", "running the model with the image as input (boxes, scores,", "Grab path to current working directory CWD_PATH = os.getcwd() #", "the image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors are the", "convolution # network predicts `5`, we know that this corresponds", "program uses a TensorFlow-trained classifier to perform object detection. #", "notebook is stored in the object_detection folder. 
sys.path.append(\"..\") # Import", "draws boxes and scores around the objects of interest in", "# Import packages import os import cv2 import numpy as", "sys # This is needed since the notebook is stored", "Perform the actual detection by running the model with the", "code is copied from Google's example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ##", "with the class label. detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')", "is copied from Google's example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and", "visualization_utils as vis_util # Name of the directory containing the", "file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to image PATH_TO_IMAGE =", "label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index = label_map_util.create_category_index(categories) # Load the Tensorflow", "# Draw the results of the detection (aka 'visulaize the", "Classifier ######### # # Author: <NAME> # Date: 1/15/18 #", "Path to label map file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path", "identify NUM_CLASSES = 6 # Load the label map. #", "image_expanded = np.expand_dims(image, axis=0) # Perform the actual detection by", "the Tensorflow model into memory. detection_graph = tf.Graph() with detection_graph.as_default():", "object was detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') # Each score represents", "pixel RGB value image = cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image, axis=0)", "# Description: # This program uses a TensorFlow-trained classifier to", "from utils import label_map_util from utils import visualization_utils as vis_util", "object detection classifier # Input tensor is the image image_tensor", "a single-column array, where each item in the column has", "a TensorFlow-trained classifier to perform object detection. # It loads", "utilites from utils import label_map_util from utils import visualization_utils as", "= detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors are the detection boxes, scores,", "scores, and classes # Each box represents a part of", "can identify NUM_CLASSES = 6 # Load the label map.", "Name of the directory containing the object detection module we're", "'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess", "od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess = tf.Session(graph=detection_graph) # Define input and", "the code is copied from Google's example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb", "# Date: 1/15/18 # Description: # This program uses a", "import visualization_utils as vis_util # Name of the directory containing", "Import packages import os import cv2 import numpy as np", "= os.getcwd() # Path to frozen detection graph .pb file,", "Object Detection Using Tensorflow-trained Classifier ######### # # Author: <NAME>", "= np.expand_dims(image, axis=0) # Perform the actual detection by running", "directory containing the object detection module we're using MODEL_NAME =", "None, 3] # i.e. 
a single-column array, where each item", "Path to image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) # Number of classes", "######### # # Author: <NAME> # Date: 1/15/18 # Description:", "the model with the image as input (boxes, scores, classes,", "= tf.Session(graph=detection_graph) # Define input and output tensors (i.e. data)", "# network predicts `5`, we know that this corresponds to", "as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess =", "= 6 # Load the label map. # Label maps", "in the image. ## Some of the code is copied", "Input tensor is the image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0') # Output", "Import utilites from utils import label_map_util from utils import visualization_utils", "that when our convolution # network predicts `5`, we know", "to `king`. # Here we use internal utility functions, but", "object detector can identify NUM_CLASSES = 6 # Load the", "detection_graph.get_tensor_by_name('image_tensor:0') # Output tensors are the detection boxes, scores, and", "the column has the pixel RGB value image = cv2.imread(PATH_TO_IMAGE)", "Tensorflow-trained Classifier ######### # # Author: <NAME> # Date: 1/15/18", "object detection on an image. # It draws boxes and", "Using Tensorflow-trained Classifier ######### # # Author: <NAME> # Date:", "packages import os import cv2 import numpy as np import", "is copied from Dat Tran's example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ##", "# This is needed since the notebook is stored in", "max_num_classes=NUM_CLASSES, use_display_name=True) category_index = label_map_util.create_category_index(categories) # Load the Tensorflow model", "import sys # This is needed since the notebook is", "image as input (boxes, scores, classes, num) = sess.run( [detection_boxes,", "classifier # Input tensor is the image image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')", "map file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to image PATH_TO_IMAGE", "It draws boxes and scores around the objects of interest", "'inference_graph' IMAGE_NAME = 'test1.jpg' # Grab path to current working", "This is needed since the notebook is stored in the", "image. Now display the image. cv2.imshow('Object detector', image) # Press", "TensorFlow-trained classifier to perform object detection. # It loads the", "so that when our convolution # network predicts `5`, we", "know that this corresponds to `king`. # Here we use", "detection module we're using MODEL_NAME = 'inference_graph' IMAGE_NAME = 'test1.jpg'", "sess = tf.Session(graph=detection_graph) # Define input and output tensors (i.e.", "label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index =", "but anything that returns a # dictionary mapping integers to", "`5`, we know that this corresponds to `king`. # Here", "class label. 
detection_scores = detection_graph.get_tensor_by_name('detection_scores:0') detection_classes = detection_graph.get_tensor_by_name('detection_classes:0') # Number", "# Name of the directory containing the object detection module", "detection by running the model with the image as input", "import numpy as np import tensorflow as tf import sys", "# Here we use internal utility functions, but anything that", "tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph)", "= detection_graph.get_tensor_by_name('num_detections:0') # Load image using OpenCV and # expand", "has the pixel RGB value image = cv2.imread(PATH_TO_IMAGE) image_expanded =", "part of the image where a particular object was detected", "results have been drawn on image. Now display the image.", "confidence for each of the objects. # The score is", "None, None, 3] # i.e. a single-column array, where each", "tensorflow as tf import sys # This is needed since", "image using OpenCV and # expand image dimensions to have", "cv2.imshow('Object detector', image) # Press any key to close the", "into memory. detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef()", "to label map file PATH_TO_LABELS = os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to", "image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) # All", "from Dat Tran's example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I", "to current working directory CWD_PATH = os.getcwd() # Path to", "use_display_name=True) category_index = label_map_util.create_category_index(categories) # Load the Tensorflow model into", "np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index, use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) # All the results", "the result image, together with the class label. detection_scores =", "and scores around the objects of interest in the image.", "we're using MODEL_NAME = 'inference_graph' IMAGE_NAME = 'test1.jpg' # Grab", "returns a # dictionary mapping integers to appropriate string labels", "this corresponds to `king`. # Here we use internal utility", "results of the detection (aka 'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array( image,", "# expand image dimensions to have shape: [1, None, None,", "as input (boxes, scores, classes, num) = sess.run( [detection_boxes, detection_scores,", "num_detections = detection_graph.get_tensor_by_name('num_detections:0') # Load image using OpenCV and #", "image. cv2.imshow('Object detector', image) # Press any key to close", "perform object detection on an image. 
# It draws boxes", "anything that returns a # dictionary mapping integers to appropriate", "category_index = label_map_util.create_category_index(categories) # Load the Tensorflow model into memory.", "line_thickness=8, min_score_thresh=0.60) # All the results have been drawn on", "input (boxes, scores, classes, num) = sess.run( [detection_boxes, detection_scores, detection_classes,", "## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some is copied from Dat Tran's", "copied from Google's example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some", "names, so that when our convolution # network predicts `5`,", "feed_dict={image_tensor: image_expanded}) # Draw the results of the detection (aka", "detection classifier # Input tensor is the image image_tensor =", "CWD_PATH = os.getcwd() # Path to frozen detection graph .pb", "of objects detected num_detections = detection_graph.get_tensor_by_name('num_detections:0') # Load image using", "detection_graph.get_tensor_by_name('num_detections:0') # Load image using OpenCV and # expand image", "detected detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') # Each score represents level of", "of the detection (aka 'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes),", "detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: serialized_graph", "with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:", "to category names, so that when our convolution # network", "shown on the result image, together with the class label.", "import cv2 import numpy as np import tensorflow as tf", "fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True) category_index", "appropriate string labels would be fine label_map = label_map_util.load_labelmap(PATH_TO_LABELS) categories", "on image. Now display the image. cv2.imshow('Object detector', image) #", "= os.path.join(CWD_PATH,'training','labelmap.pbtxt') # Path to image PATH_TO_IMAGE = os.path.join(CWD_PATH,IMAGE_NAME) #", "of the code is copied from Google's example at ##", "where each item in the column has the pixel RGB", "in the column has the pixel RGB value image =", "######## Image Object Detection Using Tensorflow-trained Classifier ######### # #", "Author: <NAME> # Date: 1/15/18 # Description: # This program", "tf import sys # This is needed since the notebook", "of the directory containing the object detection module we're using", "min_score_thresh=0.60) # All the results have been drawn on image.", "at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I changed it to make", "detection graph .pb file, which contains the model that is", "single-column array, where each item in the column has the", "image) # Press any key to close the image cv2.waitKey(0)", "# Author: <NAME> # Date: 1/15/18 # Description: # This", "detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0') # Each score represents level of confidence", "using MODEL_NAME = 'inference_graph' IMAGE_NAME = 'test1.jpg' # Grab path", "of the image where a particular object was detected detection_boxes", "level of confidence for each of the objects. 
# The", "graph .pb file, which contains the model that is used", "1/15/18 # Description: # This program uses a TensorFlow-trained classifier", "understandable to me. # Import packages import os import cv2", "example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I changed it to", "the directory containing the object detection module we're using MODEL_NAME", "'test1.jpg' # Grab path to current working directory CWD_PATH =", "data) for the object detection classifier # Input tensor is", "3] # i.e. a single-column array, where each item in", "# Number of classes the object detector can identify NUM_CLASSES", "the label map. # Label maps map indices to category", "we use internal utility functions, but anything that returns a", "contains the model that is used # for object detection.", "detection (aka 'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores),", "using OpenCV and # expand image dimensions to have shape:", "we know that this corresponds to `king`. # Here we", "the object_detection folder. sys.path.append(\"..\") # Import utilites from utils import", "make it more understandable to me. # Import packages import", "integers to appropriate string labels would be fine label_map =", "input and output tensors (i.e. data) for the object detection", "= os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to label map file PATH_TO_LABELS =", "# All the results have been drawn on image. Now", "image dimensions to have shape: [1, None, None, 3] #", "corresponds to `king`. # Here we use internal utility functions,", "output tensors (i.e. data) for the object detection classifier #", "# dictionary mapping integers to appropriate string labels would be", "## and some is copied from Dat Tran's example at", "= fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') sess = tf.Session(graph=detection_graph) # Define", "Define input and output tensors (i.e. data) for the object", "map indices to category names, so that when our convolution", "the object detection module we're using MODEL_NAME = 'inference_graph' IMAGE_NAME", "predicts `5`, we know that this corresponds to `king`. #", "image. # It draws boxes and scores around the objects", "= sess.run( [detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_expanded}) # Draw", "utils import label_map_util from utils import visualization_utils as vis_util #", "image_expanded}) # Draw the results of the detection (aka 'visulaize", "= cv2.imread(PATH_TO_IMAGE) image_expanded = np.expand_dims(image, axis=0) # Perform the actual", "label_map_util.create_category_index(categories) # Load the Tensorflow model into memory. detection_graph =", "Now display the image. cv2.imshow('Object detector', image) # Press any", "use_normalized_coordinates=True, line_thickness=8, min_score_thresh=0.60) # All the results have been drawn", "Dat Tran's example at ## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I changed", "the image. ## Some of the code is copied from", "NUM_CLASSES = 6 # Load the label map. # Label", "objects. 
# The score is shown on the result image,", "tf.import_graph_def(od_graph_def, name='') sess = tf.Session(graph=detection_graph) # Define input and output", "score is shown on the result image, together with the", "(aka 'visulaize the results') vis_util.visualize_boxes_and_labels_on_image_array( image, np.squeeze(boxes), np.squeeze(classes).astype(np.int32), np.squeeze(scores), category_index,", "map. # Label maps map indices to category names, so", "each of the objects. # The score is shown on", "is shown on the result image, together with the class", "detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_expanded}) # Draw the results of", "classes, num) = sess.run( [detection_boxes, detection_scores, detection_classes, num_detections], feed_dict={image_tensor: image_expanded})", "All the results have been drawn on image. Now display", "array, where each item in the column has the pixel", "Each score represents level of confidence for each of the", "it to make it more understandable to me. # Import", "# for object detection. PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to", "detected num_detections = detection_graph.get_tensor_by_name('num_detections:0') # Load image using OpenCV and", "detection_graph.get_tensor_by_name('detection_boxes:0') # Each score represents level of confidence for each", ".pb file, which contains the model that is used #", "<gh_stars>1-10 ######## Image Object Detection Using Tensorflow-trained Classifier ######### #", "maps map indices to category names, so that when our", "detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT,", "scores around the objects of interest in the image. ##", "in the object_detection folder. sys.path.append(\"..\") # Import utilites from utils", "# This program uses a TensorFlow-trained classifier to perform object", "tf.Session(graph=detection_graph) # Define input and output tensors (i.e. data) for", "np.expand_dims(image, axis=0) # Perform the actual detection by running the", "object detection. PATH_TO_CKPT = os.path.join(CWD_PATH,MODEL_NAME,'frozen_inference_graph.pb') # Path to label map", "od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid: serialized_graph =", "import tensorflow as tf import sys # This is needed", "Here we use internal utility functions, but anything that returns", "## Some of the code is copied from Google's example", "https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py ## but I changed it to make it more", "result image, together with the class label. detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')", "object_detection folder. sys.path.append(\"..\") # Import utilites from utils import label_map_util", "me. # Import packages import os import cv2 import numpy", "classifier to perform object detection. 
# It loads the classifier", "module we're using MODEL_NAME = 'inference_graph' IMAGE_NAME = 'test1.jpg' #", "from Google's example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some is", "= os.path.join(CWD_PATH,IMAGE_NAME) # Number of classes the object detector can", "indices to category names, so that when our convolution #", "example at ## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb ## and some is copied from", "from utils import visualization_utils as vis_util # Name of the", "detection_classes, num_detections], feed_dict={image_tensor: image_expanded}) # Draw the results of the", "the detection boxes, scores, and classes # Each box represents", "# Load the Tensorflow model into memory. detection_graph = tf.Graph()", "# Import utilites from utils import label_map_util from utils import" ]
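# A minimal post-processing sketch, not part of the original tutorial script above:
# `collect_detections` is a hypothetical helper that turns the raw arrays returned by
# sess.run() into plain (label, score, box) tuples; the 0.60 default simply mirrors
# the min_score_thresh used for visualization above.
def collect_detections(boxes, scores, classes, category_index, min_score=0.60):
    # boxes/scores/classes are the squeezed per-detection arrays from sess.run();
    # category_index is the dict built by label_map_util.create_category_index().
    results = []
    for box, score, cls in zip(boxes, scores, classes):
        if score >= min_score:
            label = category_index.get(int(cls), {}).get('name', str(int(cls)))
            results.append((label, float(score), tuple(box)))
    return results

# Example usage with the variables defined above:
# detections = collect_detections(np.squeeze(boxes), np.squeeze(scores),
#                                 np.squeeze(classes), category_index)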
[ "import BaseXpressDemocracyClubCsvImporter class Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E06000027' addresses_name = 'parl.2017-06-08/Version", "data_collection.management.commands import BaseXpressDemocracyClubCsvImporter class Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E06000027' addresses_name =", "1/Torbay Democracy_Club__08June2017.tsv' stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' elections = ['parl.2017-06-08']", "from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter class Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E06000027' addresses_name", "= 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' elections", "BaseXpressDemocracyClubCsvImporter class Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E06000027' addresses_name = 'parl.2017-06-08/Version 1/Torbay", "Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E06000027' addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' stations_name", "'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' elections =", "class Command(BaseXpressDemocracyClubCsvImporter): council_id = 'E06000027' addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv'", "= 'E06000027' addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' stations_name = 'parl.2017-06-08/Version", "council_id = 'E06000027' addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' stations_name =", "stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' elections = ['parl.2017-06-08'] csv_delimiter =", "Democracy_Club__08June2017.tsv' stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' elections = ['parl.2017-06-08'] csv_delimiter", "= 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' elections = ['parl.2017-06-08'] csv_delimiter = '\\t'", "'E06000027' addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' stations_name = 'parl.2017-06-08/Version 1/Torbay", "addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv' stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv'" ]
[ "rest_framework.views import APIView from rest_framework.response import Response from rest_framework.decorators import", "category_slug, format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return", "category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self,", "format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data)", "serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug,", "= ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug):", "= Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data)", "APIView from rest_framework.response import Response from rest_framework.decorators import api_view from", "Http404 def get(self, request, category_slug, format= None): category = self.get_object(category_slug)", "query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True)", "LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4] serializer =", "import Product, Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView):", "CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '')", "get(self, request, category_slug, format= None): category = self.get_object(category_slug) serializer =", "return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '') if", "import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products", "raise Http404 def get(self, request, category_slug, product_slug, format= None): product", "# Create your views here. 
from rest_framework.views import APIView from", "Product, Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def", "from rest_framework.response import Response from rest_framework.decorators import api_view from .models", "products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView):", "get(self, request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return", "try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request,", "serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query =", "render from django.http import Http404 # Create your views here.", "Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug)", "'') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer =", "Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self,", "try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request,", "product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self,", "except Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format= None):", "Response from rest_framework.decorators import api_view from .models import Product, Category", "None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST'])", "django.http import Http404 # Create your views here. from rest_framework.views", "def get(self, request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True)", "None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data)", "django.db.models import Q from django.shortcuts import render from django.http import", "search(request): query = request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query)", "class LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4] serializer", "here. 
from rest_framework.views import APIView from rest_framework.response import Response from", "from .models import Product, Category from .serializers import ProductSerializer, CategorySerializer", "return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug)", "api_view from .models import Product, Category from .serializers import ProductSerializer,", "serializer = ProductSerializer(products, many=True) return Response(serializer.data) else: return Response({\"products\": []})", "from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.decorators", "Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format=", "Http404 def get(self, request, category_slug, product_slug, format= None): product =", "= self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView):", "@api_view(['POST']) def search(request): query = request.data.get('query', '') if query: products", "product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class", "Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data) else:", "import render from django.http import Http404 # Create your views", "get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def", "def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise", "request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data)", "Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug,", "get(self, request, category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug)", "class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist:", ".serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None):", "format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class", "category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def", "self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def", "raise Http404 def get(self, request, category_slug, format= None): category =", "django.shortcuts import render from django.http import Http404 # Create your", "serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug):", "import Http404 # Create your views here. 
from rest_framework.views import", "Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '') if query:", "ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist:", "Http404 # Create your views here. from rest_framework.views import APIView", "Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except", "= self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request):", "products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return", "views here. from rest_framework.views import APIView from rest_framework.response import Response", "CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise", "from rest_framework.decorators import api_view from .models import Product, Category from", "request, category_slug, format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category)", "= CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query',", "category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer =", "import api_view from .models import Product, Category from .serializers import", "= request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query))", "rest_framework.response import Response from rest_framework.decorators import api_view from .models import", "rest_framework.decorators import api_view from .models import Product, Category from .serializers", "from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request,", "Create your views here. 
from rest_framework.views import APIView from rest_framework.response", "= ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try:", "def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404", "request, category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer", "ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return", "ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products =", "from django.db.models import Q from django.shortcuts import render from django.http", "self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query", "get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404", "Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug, format= None):", "return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request, category_slug,", "from django.shortcuts import render from django.http import Http404 # Create", "def get(self, request, category_slug, product_slug, format= None): product = self.get_object(category_slug,", "Q from django.shortcuts import render from django.http import Http404 #", "def search(request): query = request.data.get('query', '') if query: products =", "from django.http import Http404 # Create your views here. 
from", "Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self,", "if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products,", "product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self,", "| Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data) else: return", "import APIView from rest_framework.response import Response from rest_framework.decorators import api_view", "Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data) else: return Response({\"products\":", "import Response from rest_framework.decorators import api_view from .models import Product,", "class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except", "def get(self, request, category_slug, format= None): category = self.get_object(category_slug) serializer", "category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def", "product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product)", "format= None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return", ".models import Product, Category from .serializers import ProductSerializer, CategorySerializer class", "except Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug, format=", "query = request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) |", "return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return", "return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request, category_slug,", "CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4]", "request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer", "Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format= None): category", "your views here. from rest_framework.views import APIView from rest_framework.response import", "= Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def", "ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try:", "import Q from django.shortcuts import render from django.http import Http404" ]
[ "other): return (self.id is None or other.id is None or", "maxsize class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None,", "home self.mobile = mobile self.work = work self.fax = fax", "is None or self.id == other.id) and self.fname == other.fname", "email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None,", "self.mname = mname self.lname = lname self.nick = nick self.title", "fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None,", "fname self.mname = mname self.lname = lname self.nick = nick", "nick self.title = title self.comp = comp self.addr = addr", "= home self.mobile = mobile self.work = work self.fax =", "mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None,", "(self.id is None or other.id is None or self.id ==", "= secphone self.note = note self.id = id def __repr__(self):", "bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None):", "email3 self.homepage = homepage self.bday = bday self.bmonth = bmonth", "and self.lname == other.lname def id_or_max(self): if self.id: return int(self.id)", "self.aday = aday self.amonth = amonth self.ayear = ayear self.secaddr", "bday self.bmonth = bmonth self.byear = byear self.aday = aday", "= amonth self.ayear = ayear self.secaddr = secaddr self.secphone =", "self.fname == other.fname and self.lname == other.lname def id_or_max(self): if", "self.note = note self.id = id def __repr__(self): return \"%s:%s:%s\"", "is None or other.id is None or self.id == other.id)", "id def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def", "self.bday = bday self.bmonth = bmonth self.byear = byear self.aday", "bmonth self.byear = byear self.aday = aday self.amonth = amonth", "addr self.home = home self.mobile = mobile self.work = work", "= addr self.home = home self.mobile = mobile self.work =", "return (self.id is None or other.id is None or self.id", "bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id", "= secaddr self.secphone = secphone self.note = note self.id =", "and self.fname == other.fname and self.lname == other.lname def id_or_max(self):", "None or other.id is None or self.id == other.id) and", "self.fname, self.lname) def __eq__(self, other): return (self.id is None or", "= note self.id = id def __repr__(self): return \"%s:%s:%s\" %", "homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None,", "self.home = home self.mobile = mobile self.work = work self.fax", "self.homepage = homepage self.bday = bday self.bmonth = bmonth self.byear", "comp self.addr = addr self.home = home self.mobile = mobile", "self.secphone = secphone self.note = note self.id = id def", "(self.id, self.fname, self.lname) def __eq__(self, other): return (self.id is None", "Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None,", "= mobile self.work = work self.fax = fax self.email1 =", "def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self,", "= homepage self.bday = bday self.bmonth = bmonth self.byear =", "= id def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname)", "self.byear = byear self.aday = aday self.amonth = amonth 
self.ayear", "== other.lname def id_or_max(self): if self.id: return int(self.id) else: return", "mobile self.work = work self.fax = fax self.email1 = email1", "% (self.id, self.fname, self.lname) def __eq__(self, other): return (self.id is", "work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None,", "title self.comp = comp self.addr = addr self.home = home", "self.work = work self.fax = fax self.email1 = email1 self.email2", "= fax self.email1 = email1 self.email2 = email2 self.email3 =", "byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname", "email2 self.email3 = email3 self.homepage = homepage self.bday = bday", "__repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other):", "self.mobile = mobile self.work = work self.fax = fax self.email1", "= work self.fax = fax self.email1 = email1 self.email2 =", "= email2 self.email3 = email3 self.homepage = homepage self.bday =", "self.email3 = email3 self.homepage = homepage self.bday = bday self.bmonth", "def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None,", "__init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None,", "email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None,", "byear self.aday = aday self.amonth = amonth self.ayear = ayear", "= ayear self.secaddr = secaddr self.secphone = secphone self.note =", "== other.id) and self.fname == other.fname and self.lname == other.lname", "addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None,", "title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None,", "mname self.lname = lname self.nick = nick self.title = title", "home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None,", "import maxsize class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None,", "= aday self.amonth = amonth self.ayear = ayear self.secaddr =", "other.lname def id_or_max(self): if self.id: return int(self.id) else: return maxsize", "or self.id == other.id) and self.fname == other.fname and self.lname", "class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None,", "other.id) and self.fname == other.fname and self.lname == other.lname def", "work self.fax = fax self.email1 = email1 self.email2 = email2", "= lname self.nick = nick self.title = title self.comp =", "self.lname = lname self.nick = nick self.title = title self.comp", "\"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other): return (self.id", "= email1 self.email2 = email2 self.email3 = email3 self.homepage =", "self.amonth = amonth self.ayear = ayear self.secaddr = secaddr self.secphone", "amonth self.ayear = ayear self.secaddr = secaddr self.secphone = secphone", "secaddr self.secphone = secphone self.note = note self.id = id", "self.nick = nick self.title = title self.comp = comp self.addr", "return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other): return", "def __eq__(self, other): return (self.id is None or other.id is", "ayear self.secaddr = secaddr self.secphone = secphone self.note = note", "self.comp = comp self.addr = 
from sys import maxsize


class Contact:
    """Value object describing a single address-book entry."""

    def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None,
                 addr=None, home=None, mobile=None, work=None, fax=None, email1=None,
                 email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None,
                 aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None,
                 id=None):
        self.fname = fname
        self.mname = mname
        self.lname = lname
        self.nick = nick
        self.title = title
        self.comp = comp
        self.addr = addr
        self.home = home
        self.mobile = mobile
        self.work = work
        self.fax = fax
        self.email1 = email1
        self.email2 = email2
        self.email3 = email3
        self.homepage = homepage
        self.bday = bday
        self.bmonth = bmonth
        self.byear = byear
        self.aday = aday
        self.amonth = amonth
        self.ayear = ayear
        self.secaddr = secaddr
        self.secphone = secphone
        self.note = note
        self.id = id

    def __repr__(self):
        return "%s:%s:%s" % (self.id, self.fname, self.lname)

    def __eq__(self, other):
        # Two contacts match when their names agree and their ids either
        # agree or are unknown on at least one side.
        return (self.id is None or other.id is None or self.id == other.id) \
            and self.fname == other.fname and self.lname == other.lname

    def id_or_max(self):
        # Sort key: contacts that have no id yet sort after all persisted ones.
        if self.id:
            return int(self.id)
        else:
            return maxsize
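# --- Illustrative sketch, not part of the original model: how the helpers
# above are typically combined. The permissive __eq__ lets a contact whose
# id is still unknown match its persisted counterpart, and id_or_max makes
# id-less contacts sort last. All names and values below are hypothetical.
if __name__ == "__main__":
    scraped = Contact(fname="Ada", lname="Lovelace")             # no id yet
    stored = Contact(fname="Ada", lname="Lovelace", id="12")
    assert scraped == stored        # ids are ignored while either is None
    contacts = [stored, scraped, Contact(fname="Alan", lname="Turing", id="3")]
    contacts.sort(key=Contact.id_or_max)
    print(contacts)                 # [3:Alan:Turing, 12:Ada:Lovelace, None:Ada:Lovelace]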
[ "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "in the # documentation and/or other materials provided with the", "\"b\", \"\", 1.0 ), ] ) savePath = os.path.abspath( os.path.join(", "p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"]", "def tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname(", "# # Redistribution and use in source and binary forms,", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testLoad( self", ") preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\"", "\"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ] )", "testLoad( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", "] self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"]", "def testLoad( self ) : testObj = IECore.Parameterised( \"testParameterised1\" )", "self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ),", "savePath, \"basicPresetTest.cob\" ) ) ) # reload p = IECore.BasicPreset(", "with the distribution. # # * Neither the name of", "] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter(", "savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join( savePath,", "IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) p = IECore.BasicPreset(", "in binary form must reproduce the above copyright # notice,", "PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE", "savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) )", ") self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2,", "savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save without the", "p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0", "IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )", "\"c\", \"\", 0.0 ), ] ) p = IECore.BasicPreset( testObj,", ") self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False )", "isinstance( p, IECore.BasicPreset ) ) p.metadata() def testClasses( self )", "DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters()", ") self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters()", "contributors to this software may be used to endorse or", "False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath", "] classes2 = [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True", "conditions are # met: # # * Redistributions of source", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #", "len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset )", "documentation and/or other materials provided with the distribution. 
# #", "IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\", \"\", members =", "THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF", "testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue(", "modification, are permitted provided that the following conditions are #", "p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p,", "list of conditions and the following disclaimer in the #", "\"\", \"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join(", "\"basicPreset\" ) ) paths = ( os.path.join( savePath, \"basicPresetTest\" ),", "\"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ]", "os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler()", "name of Image Engine Design nor the names of any", ") ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2 =", ") self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testLoad( self ) :", ") testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] )", "OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "self.assertEqual( p.parameters().getValue(), v ) def tearDown( self ) : savePath", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR", ") p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False, True ] ) )", "source and binary forms, with or without # modification, are", "self ) : p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [", "IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ),", "testObj.parameters() ) # Save for the classLoader and check its", "\"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse(", "testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue(", "= IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj,", ") : def testCopy( self ) : testObj = IECore.Parameterised(", "p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = [ c[1:] for c", "IN ANY WAY OUT OF THE USE OF THIS #", "# documentation and/or other materials provided with the distribution. #", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY,", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF", "\"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) )", "import sys import shutil import unittest import IECore class TestBasicPreset(", "NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE", ") # Save for the classLoader and check its there,", "), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses( [", ") preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def", "THE POSSIBILITY OF SUCH DAMAGE. 
# ########################################################################## from __future__ import", "), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ), ] ) ] )", "classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) )", ") : p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter(", "NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS;", "testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\",", "# # * Neither the name of Image Engine Design", "and binary forms, with or without # modification, are permitted", "preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" )", "testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0", "= IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue( p2.applicableTo(", "os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) preset =", "\"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\",", "OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "\"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue(", "os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile( p", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR #", "\"AS # IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "form must reproduce the above copyright # notice, this list", "testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"],", "self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue(", "classes2[1:] ) def testClassVectors( self ) : testObj = IECore.Parameterised(", "), ] ) p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue(", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE", "with_statement import os import sys import shutil import unittest import", "testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 =", "), \"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler", "or # promote products derived from this software without specific", "THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) preset = IECore.BasicPreset(", "] ) testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join( \"maths\", \"multiply\" ),", "CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN", ") classes1 = [ c[1:] for c in testObj.parameters()[\"b\"].getClasses( True", ") def testClassLoader( self ) : testObj = IECore.Parameterised( \"testParameterised1\"", "[ True, False, True ] ) ) v = p.parameters().getValue().copy()", "os.path.isfile( p ) : os.remove( p ) if __name__ ==", "testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\",", ") preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2 = 
IECore.BasicPreset(", "), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) p =", ") ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ]", "NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL", "A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE", "True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:]", "def testCopy( self ) : testObj = IECore.Parameterised( \"testParameterised1\" )", "testSave( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", "savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )", "self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] )", "testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self ) : testObj =", "self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual(", "notice, this list of conditions and the following disclaimer. #", "OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY", "other materials provided with the distribution. # # * Neither", "reproduce the above copyright # notice, this list of conditions", "preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join(", "Inc. All rights reserved. # # Redistribution and use in", "INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ] )", "\"b\", \"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\"", "must retain the above copyright # notice, this list of", "\"\", 1.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname(", "* Redistributions in binary form must reproduce the above copyright", "classLoader and check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False )", "TestBasicPreset( unittest.TestCase ) : def testCopy( self ) : testObj", "OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue(", "prior # written permission. 
# # THIS SOFTWARE IS PROVIDED", "classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True )", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", ") self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self ) :", "# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) testObj2", "os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in paths :", "= testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p =", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "p ) elif os.path.isfile( p ) : os.remove( p )", "[ c[1:] for c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2", "may be used to endorse or # promote products derived", ") def testSave( self ) : testObj = IECore.Parameterised( \"testParameterised1\"", ") def testLoad( self ) : testObj = IECore.Parameterised( \"testParameterised1\"", "testCompoundVectorParameter( self ) : p = IECore.Parameterised( \"test\" ) p.parameters().addParameters(", "self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader(", "v ) def tearDown( self ) : savePath = os.path.abspath(", "os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) messageHandler", "os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() )", ") ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self )", ") classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] )", "), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) savePath =", "COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT", "len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() )", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "(INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader(", "), ] ) classes1 = [ c[1:] for c in", "disclaimer. # # * Redistributions in binary form must reproduce", "IECore.StringVectorData( [ \"1\", \"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData(", "IECore.BoolVectorData() ), ] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\",", "v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue(", "of source code must retain the above copyright # notice,", "0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo(", "), \"data\", \"basicPreset\" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters()", "# PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT", "are emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler :", "paths = ( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\"", "SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR", ") testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.FloatParameter( \"b\",", "testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 =", "without specific prior # written permission. # # THIS SOFTWARE", "IECore.BoolParameter( \"a\", \"\", True ), IECore.FloatParameter( \"b\", \"\", 1.0 ),", "= IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath,", ") ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = [ c[1:]", "IECore.BasicPreset ) ) p.metadata() def testClasses( self ) : testObj", "\"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised1\" )", "( \"coIO\", \"compoundObjectInOut\", 1 ), ] ) testObj2 = IECore.Parameterised(", "__file__ ), \"data\", \"basicPreset\" ) ) paths = ( os.path.join(", "\"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter(", "EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, #", "IECore.BasicPreset( testObj, testObj.parameters() ) # Save for the classLoader and", "HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR", "preset.save( savePath, \"basicPresetTestClassLoader\" ) # make sure that no messages", "its there, we test the 'loadability' later... preset.save( savePath, \"basicPresetTest\"", "os.path.join( \"maths\", \"multiply\" ), 2 ), ] ) testObj2 =", "p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1", "provided with the distribution. # # * Neither the name", "0.0 ), ] ) p = IECore.BasicPreset( testObj, testObj.parameters() )", "specific prior # written permission. 
# # THIS SOFTWARE IS", "testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\",", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED.", ") self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2,", ") # save without the classLoader and check its there", "testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"],", "# * Neither the name of Image Engine Design nor", "1.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__", "import unittest import IECore class TestBasicPreset( unittest.TestCase ) : def", "[ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\"", "= [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\",", "p in paths : if os.path.isdir( p ) : shutil.rmtree(", "classes1[1:], classes2[1:] ) def testClassVectors( self ) : testObj =", "os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join(", "= IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) #", "= os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) )", "__file__ ), \"data\", \"basicPreset\" ) ) preset = IECore.BasicPreset( testObj,", ": loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p =", "# promote products derived from this software without specific prior", "testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testLoad( self ) : testObj =", "self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue(", "self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()", "materials provided with the distribution. # # * Neither the", "p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue(", "\"\", False ), IECore.CompoundVectorParameter( \"c\", \"\", members = [ IECore.StringVectorParameter(", "and/or other materials provided with the distribution. # # *", "OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ),", "[ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 =", "# IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) )", ") ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo(", "or without # modification, are permitted provided that the following", "the distribution. 
# # * Neither the name of Image", "def testClasses( self ) : testObj = IECore.Parameterised( \"testParameterised1\" )", "Redistribution and use in source and binary forms, with or", "the following disclaimer in the # documentation and/or other materials", ") self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"]", ") ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2,", "provided that the following conditions are # met: # #", "endorse or # promote products derived from this software without", "# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "), ( \"coIO\", \"compoundObjectInOut\", 1 ), ] ) testObj2 =", "in paths : if os.path.isdir( p ) : shutil.rmtree( p", "p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def tearDown( self", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE", "retain the above copyright # notice, this list of conditions", ") ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo(", "[ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0", "# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\",", "testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join( \"maths\", \"multiply\" ), 2 ),", "USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE", "testCopy( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", "OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,", "[ IECore.BoolParameter( \"a\", \"\", True ), IECore.FloatParameter( \"b\", \"\", 1.0", ") self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2,", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.", ") ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True )", "os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save without", ": if os.path.isdir( p ) : shutil.rmtree( p ) elif", "os import sys import shutil import unittest import IECore class", "of Image Engine Design nor the names of any #", "rights reserved. 
# # Redistribution and use in source and", "IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ), ] ) ] ) p[\"c\"][\"s\"].setValue(", "testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\",", "binary form must reproduce the above copyright # notice, this", "p, IECore.BasicPreset ) ) p.metadata() def testClasses( self ) :", "testClassVectors( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", "for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2", "p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(),", "p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters() )", "testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2 ) p =", "# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "shutil.rmtree( p ) elif os.path.isfile( p ) : os.remove( p", "that the following conditions are # met: # # *", ") ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset(", "Engine Design Inc. All rights reserved. # # Redistribution and", ": testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\",", "# Save for the classLoader and check its there, we", "[ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual(", "# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] )", "testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) # make sure", "# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset(", "disclaimer in the # documentation and/or other materials provided with", "= IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() )", "= p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo(", "\"\", 0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname(", "\"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save without the classLoader", "the following conditions are # met: # # * Redistributions", "= testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors(", "following conditions are # met: # # * Redistributions of", "Design Inc. All rights reserved. 
# # Redistribution and use", "above copyright # notice, this list of conditions and the", "= testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual(", "\"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) )", ") self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(),", "\"\", True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ] )", "os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for", "), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) testObj2 =", "testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\",", "preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\"", "self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self ) : p", "in source and binary forms, with or without # modification,", "\"b\", \"\", \"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses( [ ( \"mult\",", "OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "preset = IECore.BasicPreset( testObj, testObj.parameters() ) # Save for the", "in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 ) def", ") ] self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset( testObj,", "), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\"", "[ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 =", "list of conditions and the following disclaimer. # # *", "'loadability' later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath,", "IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()", "must reproduce the above copyright # notice, this list of", "] ) ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p,", "OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", "CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "\"maths\", \"multiply\" ), 2 ), ] ) testObj2 = IECore.Parameterised(", ") ) paths = ( os.path.join( savePath, \"basicPresetTest\" ), os.path.join(", "with messageHandler : p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\"", ") p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) )", "of any # other contributors to this software may be", "testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 )", "p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False, True ] ) ) v", ") v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() )", "), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join(", "testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 )", "distribution. 
# # * Neither the name of Image Engine", ") testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join( \"maths\", \"multiply\" ), 2", "def testSave( self ) : testObj = IECore.Parameterised( \"testParameterised1\" )", "\"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] )", ") self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def", "p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\", \"\",", "use in source and binary forms, with or without #", "), ] ) testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join( \"maths\", \"multiply\"", "DAMAGE. # ########################################################################## from __future__ import with_statement import os import", "# make sure that no messages are emitted during loading", "] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass(", "= loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0 )", "* Neither the name of Image Engine Design nor the", "\"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join( \"maths\",", "os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # reload p =", "this software may be used to endorse or # promote", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2", "] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\"", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE)", "os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) paths", "os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath,", "paths : if os.path.isdir( p ) : shutil.rmtree( p )", "to this software may be used to endorse or #", "the # documentation and/or other materials provided with the distribution.", ") ) p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages", "os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) paths = (", "c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 )", "IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ), ] ) ]", "\"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\", \"\", members = [", "testObj2.parameters()[\"c\"] ) classes1 = [ c[1:] for c in testObj.parameters()[\"b\"].getClasses(", "p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual(", "1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], )", "( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join(", "\"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass( True )", "IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages", "IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join(", "\"a\", \"\", True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ]", ") def 
testCompoundVectorParameter( self ) : p = IECore.Parameterised( \"test\"", "\"basicPresetTest.cob\" ) ) ) # reload p = IECore.BasicPreset( os.path.join(", "testClassLoader( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", "= IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False", "permitted provided that the following conditions are # met: #", "WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN", "and the following disclaimer. # # * Redistributions in binary", ") self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def", "c[1:] for c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 =", ") ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True", ") p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = [ c[1:] for", "testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) )", "p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v", "\"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter(", "v ) preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v )", "Image Engine Design Inc. All rights reserved. # # Redistribution", "no messages are emitted during loading messageHandler = IECore.CapturingMessageHandler() with", "self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [", "\"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler : p", ": p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\",", "AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT,", ": shutil.rmtree( p ) elif os.path.isfile( p ) : os.remove(", ": def testCopy( self ) : testObj = IECore.Parameterised( \"testParameterised1\"", "and check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue(", "\"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass( True", "testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo(", "\"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.FloatParameter(", "IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\",", "messageHandler = IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset( os.path.join(", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND", "# # * Redistributions of source code must retain the", "\"mult\", os.path.join( \"maths\", \"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\",", "\"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ), 0 )", "True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses(", ") self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj,", "savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p", "0.0 ) def testClassLoader( self ) : testObj = 
IECore.Parameterised(", "self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self ) : testObj", "IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ),", "with or without # modification, are permitted provided that the", "testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\",", "testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1", "IECore.BoolVectorData( [ True, False, True ] ) ) v =", "\"basicPresetTest-1.py\" ) ) ) # save without the classLoader and", "\"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ] ) testObj2", ") testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\",", "names of any # other contributors to this software may", "), ] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 =", ") p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True )", ": p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) )", "p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() )", "\"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\",", "True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj,", "following disclaimer. # # * Redistributions in binary form must", "parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE", "testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(),", "# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "import os import sys import shutil import unittest import IECore", "there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath,", "self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) #", "savePath ) ) p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len(", ") classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:] )", "True ] ) ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset(", ") : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\",", ") testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(),", "= IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset( os.path.join( savePath,", "IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = [", "this software without specific prior # written permission. # #", "TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", ") ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo(", "testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:],", "(c) 2010-2012, Image Engine Design Inc. All rights reserved. 
#", ") p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj,", ") p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual(", "def testClassVectors( self ) : testObj = IECore.Parameterised( \"testParameterised1\" )", "self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(),", "os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # reload p", "classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" )", "os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) paths =", "classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "classes2 = [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True )", ") classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True", "\"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ), ]", "self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 =", "Design nor the names of any # other contributors to", "unittest.TestCase ) : def testCopy( self ) : testObj =", "), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata()", ") testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\",", "True ) ] classes2 = [ c[1:] for c in", "), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [", "THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY", "classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue(", "preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\"", "met: # # * Redistributions of source code must retain", "c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2 )", "), ] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\",", "used to endorse or # promote products derived from this", "SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR", ") savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\"", "preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters()", "1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters(", "of conditions and the following disclaimer. 
# # * Redistributions", "# * Redistributions in binary form must reproduce the above", "and use in source and binary forms, with or without", "0.0 ) def testLoad( self ) : testObj = IECore.Parameterised(", ") ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath,", "def testCompoundVectorParameter( self ) : p = IECore.Parameterised( \"test\" )", "testObj.parameters()[\"a\"], ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue(", "IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ),", "p ) : shutil.rmtree( p ) elif os.path.isfile( p )", "\"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter(", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## from", "SUCH DAMAGE. # ########################################################################## from __future__ import with_statement import os", "self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] )", "are # met: # # * Redistributions of source code", "# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "\"basicPreset\" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save(", "\"3\" ] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False, True", ") preset = IECore.BasicPreset( testObj, testObj.parameters() ) # Save for", "we test the 'loadability' later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue(", "savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" )", "IECore.CompoundVectorParameter( \"c\", \"\", members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData()", "Neither the name of Image Engine Design nor the names", "promote products derived from this software without specific prior #", "os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() )", "c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1,", "True ) ] self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset(", "AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. 
IN", "p ) : os.remove( p ) if __name__ == \"__main__\":", ") self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(),", "), ) for p in paths : if os.path.isdir( p", "), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ),", ") testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\",", "] ) testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter(", "0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True )", "testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:],", "unittest import IECore class TestBasicPreset( unittest.TestCase ) : def testCopy(", "), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath =", "IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath )", "p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters()", "forms, with or without # modification, are permitted provided that", ") # reload p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" )", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR #", "False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) p", "TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "binary forms, with or without # modification, are permitted provided", "OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## from __future__", "IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters()", "copyright # notice, this list of conditions and the following", "\"c\", \"\", 0.0 ), ] ) savePath = os.path.abspath( os.path.join(", "savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" )", "savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\"", "# reload p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) )", "software may be used to endorse or # promote products", "testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False )", "parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) )", "classes1, classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue(", "members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\",", "POSSIBILITY OF SUCH DAMAGE. 
# ########################################################################## from __future__ import with_statement", "classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo(", "\"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ]", "EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE", "IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ),", "self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters()", "\"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ),", "classes1 = [ c[1:] for c in testObj.parameters()[\"b\"].getClasses( True )", "# Copyright (c) 2010-2012, Image Engine Design Inc. All rights", "] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter(", "p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\",", "class TestBasicPreset( unittest.TestCase ) : def testCopy( self ) :", ") #reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) )", "\"\", members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter(", ") paths = ( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath,", "1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters(", "2 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters(", "# met: # # * Redistributions of source code must", "IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) testObj2 = IECore.Parameterised(", ") for p in paths : if os.path.isdir( p )", ") ) self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo(", "IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue( p2.applicableTo( testObj,", "p.parameters().getValue(), v ) preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v", "import shutil import unittest import IECore class TestBasicPreset( unittest.TestCase )", "without the classLoader and check its there preset.save( savePath, \"basicPresetTest\",", "self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata() def testClasses( self", "for c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [", "[ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData()", ") def tearDown( self ) : savePath = os.path.abspath( os.path.join(", "USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "\"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2", "\"coIO\", \"compoundObjectInOut\", 1 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\"", "p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual(", "the above copyright # notice, this list of conditions and", "its there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join(", "self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # 
reload", "ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT", "\"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ),", "True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self )", ") self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) )", "Copyright (c) 2010-2012, Image Engine Design Inc. All rights reserved.", "with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath ) )", "TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF", "FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO", "ARISING IN ANY WAY OUT OF THE USE OF THIS", "), 2 ), ( \"coIO\", \"compoundObjectInOut\", 1 ), ] )", "testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False", "self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"]", "] ) classes1 = [ c[1:] for c in testObj.parameters()[\"b\"].getClasses(", "messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p", "# written permission. # # THIS SOFTWARE IS PROVIDED BY", "classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo(", "True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) savePath", "other contributors to this software may be used to endorse", "\"data\", \"basicPreset\" ) ) paths = ( os.path.join( savePath, \"basicPresetTest\"", "IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ),", "__file__ ), \"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with", ") self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) )", "p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) # make", "that no messages are emitted during loading messageHandler = IECore.CapturingMessageHandler()", "# Redistribution and use in source and binary forms, with", "( \"mult\", os.path.join( \"maths\", \"multiply\" ), 2 ), ( \"coIO\",", "testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self", "\"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False,", "p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def tearDown( self )", "True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:]", "Engine Design nor the names of any # other contributors", "\"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler :", "code must retain the above copyright # notice, this list", "from this software without specific prior # written permission. 
#", "False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual(", "True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testLoad( self )", "THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF", "testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True", ") ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False", "testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self", ") : shutil.rmtree( p ) elif os.path.isfile( p ) :", "\"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ]", "p.metadata() def testClasses( self ) : testObj = IECore.Parameterised( \"testParameterised1\"", "testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass(", "), ] ) testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [", "* Redistributions of source code must retain the above copyright", "testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.FloatParameter( \"b\", \"\",", ") ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj,", "), \"data\", \"basicPreset\" ) ) paths = ( os.path.join( savePath,", "2 ), ( \"coIO\", \"compoundObjectInOut\", 1 ), ] ) testObj2", ") ] classes2 = [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses(", ") self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self ) :", "preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def tearDown(", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR #", "# modification, are permitted provided that the following conditions are", "the following disclaimer. 
# # * Redistributions in binary form", "if os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile(", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT", "make sure that no messages are emitted during loading messageHandler", "IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR", ") ) # reload p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\"", "testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo(", "tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__", "conditions and the following disclaimer in the # documentation and/or", "p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue(", "\"basicPresetTestClassLoader\" ), ) for p in paths : if os.path.isdir(", "STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING", ") p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\",", ": savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\"", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY", "False ), IECore.CompoundVectorParameter( \"c\", \"\", members = [ IECore.StringVectorParameter( \"s\",", "self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters()", "OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR", "\"basicPresetTest2\", classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\"", "] ) p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo(", ")() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p,", "= IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] )", "testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p(", "= [ c[1:] for c in testObj.parameters()[\"b\"].getClasses( True ) ]", "sure that no messages are emitted during loading messageHandler =", "), IECore.CompoundVectorParameter( \"c\", \"\", members = [ IECore.StringVectorParameter( \"s\", \"\",", "testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [ c[1:] for c", ") self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"]", "\"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ]", "messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath(", "# notice, this list of conditions and the following disclaimer.", ") ) messageHandler = IECore.CapturingMessageHandler() with messageHandler : p =", "OF SUCH DAMAGE. 
# ########################################################################## from __future__ import with_statement import", ") ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False, True ] )", "\"basicPreset\" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) #", "check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile(", ") # make sure that no messages are emitted during", "savePath, \"basicPresetTestClassLoader\" ) # make sure that no messages are", "IECore.SearchPath( savePath ) ) p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual(", "# # * Redistributions in binary form must reproduce the", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS #", "AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR IMPLIED", ") ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2,", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "of conditions and the following disclaimer in the # documentation", "the name of Image Engine Design nor the names of", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "= IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\"", "os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile(", "def testClassLoader( self ) : testObj = IECore.Parameterised( \"testParameterised1\" )", ") ) p.metadata() def testClasses( self ) : testObj =", "classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:] ) def", "save without the classLoader and check its there preset.save( savePath,", "preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(),", "os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ),", "\"data\", \"basicPreset\" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters() )", "= IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath", "testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) )", "testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = [", "= IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True", "LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load( \"basicPresetTestClassLoader\" )()", "########################################################################## from __future__ import with_statement import os import sys import", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED", ") def testClassVectors( self ) : testObj = IECore.Parameterised( \"testParameterised1\"", "testObj2, testObj2.parameters()[\"c\"] ) classes1 = [ c[1:] for c in", "self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testLoad( self ) : testObj", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", ") preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) )", ") ) ) # reload p = IECore.BasicPreset( os.path.join( savePath,", "p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) 
) self.assertTrue( p2.applicableTo(", "savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ), 0", "\"maths\", \"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\", 1 ),", ") self.assertEqual( p.parameters().getValue(), v ) def tearDown( self ) :", "= IECore.BasicPreset( testObj, testObj.parameters() ) # Save for the classLoader", "INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF", "\"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" )", "OUT OF THE USE OF THIS # SOFTWARE, EVEN IF", "conditions and the following disclaimer. # # * Redistributions in", "p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()", "testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse(", "loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load(", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY,", "testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset(", "p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(),", ") p.metadata() def testClasses( self ) : testObj = IECore.Parameterised(", "\"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = [ c[1:] for", "p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue", "to endorse or # promote products derived from this software", ") self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata() def testClasses(", "False, True ] ) ) v = p.parameters().getValue().copy() preset =", "\"IECORE_OP_PATHS\" ), ] ) classes1 = [ c[1:] for c", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF #", "\"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ]", "the 'loadability' later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join(", "# other contributors to this software may be used to", "# save without the classLoader and check its there preset.save(", "p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0", "p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo(", "this list of conditions and the following disclaimer. 
# #", "notice, this list of conditions and the following disclaimer in", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES", "= IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False", "\"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\"", "testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse(", "c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1,", "savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\"", "testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) )", ") self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) #", "messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )", "later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\",", "All rights reserved. # # Redistribution and use in source", "p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue(", ") elif os.path.isfile( p ) : os.remove( p ) if", "), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ), )", "without # modification, are permitted provided that the following conditions", ") p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual(", ") ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" )", "), 2 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" )", "elif os.path.isfile( p ) : os.remove( p ) if __name__", "= testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual(", "the classLoader and check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False", "= IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len(", "[ ( \"mult\", os.path.join( \"maths\", \"multiply\" ), 2 ), (", "# notice, this list of conditions and the following disclaimer", "\"1\", \"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True,", "check its there, we test the 'loadability' later... 
preset.save( savePath,", "this list of conditions and the following disclaimer in the", "\"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter(", "self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self ) : testObj", "[ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\",", "messages are emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler", "IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass(", "HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER", "os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) messageHandler =", ") p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ] ) )", "savePath, \"basicPresetTestClassLoader\" ), ) for p in paths : if", "testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2(", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, #", "LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN", "1.0 ) def testSave( self ) : testObj = IECore.Parameterised(", "\"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = [ c[1:]", "True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj,", ") self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue )", "Image Engine Design nor the names of any # other", "testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() )", ") : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter(", "testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 )", "True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ),", "classes1, classes2 ) def testCompoundVectorParameter( self ) : p =", "reserved. 
# # Redistribution and use in source and binary", "testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters()", "0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__", ") ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters()", "########################################################################## # # Copyright (c) 2010-2012, Image Engine Design Inc.", "= IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False", "\"\", True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] )", "IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ),", "BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY", "ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "for the classLoader and check its there, we test the", "LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING #", "testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self", ") self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2", "IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath = os.path.abspath(", "IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ),", "Redistributions of source code must retain the above copyright #", "self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=(", "p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] )", "__future__ import with_statement import os import sys import shutil import", "savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath,", "] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter(", "import with_statement import os import sys import shutil import unittest", "), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in paths", "and check its there, we test the 'loadability' later... 
preset.save(", "the names of any # other contributors to this software", "testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0", "c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [ c[1:]", "IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ),", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "ANY WAY OUT OF THE USE OF THIS # SOFTWARE,", "= IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load( \"basicPresetTestClassLoader\"", "True ) self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self )", "testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset(", "\"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in", "# * Redistributions of source code must retain the above", "LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION)", "\"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ), ] )", "during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader =", "[ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual(", "self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testLoad(", "= ( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ),", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE", "\"compoundObjectInOut\", 1 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" )", "\"\", 0.0 ), ] ) p = IECore.BasicPreset( testObj, testObj.parameters()", "\"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\"", "[ \"1\", \"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [", ") self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj,", "OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "testObj, testObj.parameters() ) # Save for the classLoader and check", "any # other contributors to this software may be used", ") messageHandler = IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset(", "the classLoader and check its there, we test the 'loadability'", "permission. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "classes2 ) def testCompoundVectorParameter( self ) : p = IECore.Parameterised(", ") testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\",", ") preset.save( savePath, \"basicPresetTestClassLoader\" ) # make sure that no", ") self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self ) :", "are permitted provided that the following conditions are # met:", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE", "testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self ) : testObj =", "testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() )", "] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self ) :", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS", "Save for the classLoader and check its there, we test", ") p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ),", "testClasses( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", "Redistributions in binary form must reproduce the above copyright #", "\"\", IECore.BoolVectorData() ), ] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [", "self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=(", "\"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue(", "reload p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue(", "testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p(", "2010-2012, Image Engine Design Inc. All rights reserved. # #", "1 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters(", "testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True", "IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) )", "0 ) self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata() def", "testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\",", "test the 'loadability' later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile(", "p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() )", "\"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance(", "# # Copyright (c) 2010-2012, Image Engine Design Inc. 
All", "os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save", "classLoader and check its there, we test the 'loadability' later...", "source code must retain the above copyright # notice, this", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) preset", "= IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\"", "= [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ]", "in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2 ) p", ") p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset( p,", ") testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter( \"b\",", ") testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\",", "derived from this software without specific prior # written permission.", "for p in paths : if os.path.isdir( p ) :", "True ) ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self", "software without specific prior # written permission. # # THIS", "\"c\", \"\", members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ),", "PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER", "p.parameters().getValue(), v ) def tearDown( self ) : savePath =", "in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [ c[1:] for", ") self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters() ) self.assertEqual(", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, #", "True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj,", ") ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) )", "os.path.join( \"maths\", \"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\", 1", "OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED", "IECore class TestBasicPreset( unittest.TestCase ) : def testCopy( self )", "\"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter(", ") testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\",", ") : os.remove( p ) if __name__ == \"__main__\": unittest.main()", "following disclaimer in the # documentation and/or other materials provided", "messageHandler : p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" )", ") ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) # Save", "messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset ) )", "shutil import unittest import IECore class TestBasicPreset( unittest.TestCase ) :", "True, False, True ] ) ) v = p.parameters().getValue().copy() preset", "p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v )", "= IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj,", "IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) savePath = os.path.abspath(", "] ) savePath = os.path.abspath( os.path.join( 
os.path.dirname( __file__ ), \"data\",", "testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1", "p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() )", "os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath,", "from __future__ import with_statement import os import sys import shutil", "be used to endorse or # promote products derived from", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ##########################################################################", "savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" )", "), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse(", "preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save(", "\"multiply\" ), 2 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\"", ") testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] )", "\"b\", \"\", IECore.BoolVectorData() ), ] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData(", "SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS", "self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset", "self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self ) : testObj", "[ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\", \"\", members", "testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter(", "there, we test the 'loadability' later... 
preset.save( savePath, \"basicPresetTest\" )", "IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\",", "self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave(", "import IECore class TestBasicPreset( unittest.TestCase ) : def testCopy( self", "testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) # make sure that", "IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ),", "emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader", ") ) # save without the classLoader and check its", "self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 =", "#reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue(", "\"b\", \"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised1\"", ") ) ) # save without the classLoader and check", ") self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2", "(INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS", "= IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() )", "products derived from this software without specific prior # written", "sys import shutil import unittest import IECore class TestBasicPreset( unittest.TestCase", "self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() )", ") p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj,", "# ########################################################################## from __future__ import with_statement import os import sys", "and the following disclaimer in the # documentation and/or other", "testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2", "True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self )", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS", "testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass(", "] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False, True ]", "IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() ) )", "\"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\", 1 ), ]", "1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], )", ") testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters()", "nor the names of any # other contributors to this", "IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses( [ (", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR", "\"basicPresetTestClassLoader\" ) # make sure that no messages are emitted" ]
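The tests above reduce to a simple capture/persist/re-apply workflow. The following standalone sketch shows that workflow outside of unittest; the names "holder", "gain", "gainPreset" and the /tmp/presets location are illustrative assumptions, while the calls themselves (BasicPreset construction, applicableTo, save, and applying the preset by calling it) are exactly those exercised by the test suite.

# Illustrative BasicPreset round trip (a sketch; names and paths are
# made up for the example, not taken from the tests above).
import os
import IECore

holder = IECore.Parameterised( "holder" )
holder.parameters().addParameters( [ IECore.FloatParameter( "gain", "", 1.0 ) ] )
holder.parameters()["gain"].setTypedValue( 2.5 )

# capture the current values...
preset = IECore.BasicPreset( holder, holder.parameters() )
# ...persist them as a single gainPreset.cob file...
preset.save( "/tmp/presets", "gainPreset", classLoadable=False )

# ...and later restore them onto a compatible parameter set.
restored = IECore.BasicPreset( os.path.join( "/tmp/presets", "gainPreset.cob" ) )
holder.parameters()["gain"].setTypedValue( 1.0 )
if restored.applicableTo( holder, holder.parameters() ) :
    restored( holder, holder.parameters() )  # gain is 2.5 again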
"""Pacman game domain."""

from rlpy.Tools import __rlpy_location__
from .Domain import Domain
from .PacmanPackage import layout, pacman, game, ghostAgents
from .PacmanPackage import graphicsDisplay
import numpy as np
from copy import deepcopy
import os
import time

__copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy"
__credits__ = ["<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>"]
__license__ = "BSD 3-Clause"
__author__ = "<NAME>"


class Pacman(Domain):

    """
    Pacman domain, which acts as a wrapper for the Pacman implementation
    from the BerkeleyX/CS188.1x course project 3.

    **STATE:** The state vector has a series of dimensions:

    * [2] The x and y coordinates of pacman
    * [3 * ng] the x and y coordinates as well as the scare time of each
      ghost ("scare time" is how long the ghost remains scared after
      consuming a capsule.)
    * [nf] binary variables indicating if a food is still on the board or not
    * [nc] binary variables for each capsule indicating if it is still on
      the board or not

    *nf* and *nc* are map-dependent, and *ng* can be set as a parameter.
    Based on the above, the total dimensionality of the state vector is
    map-dependent and given by (2 + 3*ng + nf + nc).

    **ACTIONS:** Move Pacman [up, down, left, right, stay]

    **REWARD:** See the Berkeley project website below for more info.

    .. note::
        The visualization runs as fast as your CPU will permit; to slow
        things down so gameplay is actually visible, de-comment time.sleep()
        in the showDomain() method.

    **REFERENCE:** This domain is an RLPy wrapper for the implementation
    from the `BerkeleyX/CS188.1x course project 3
    <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_

    See the original `source code (zipped)
    <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_

    For more details of the domain see the original package in the
    `Domains/PacmanPackage` folder.
    """

    _max_scared_time = 39

    actions = ["Stop", "North", "East", "South", "West"]
    actions_num = 5
    episodeCap = 1000

    #: location of layouts shipped with rlpy
    default_layout_dir = os.path.join(
        __rlpy_location__, "Domains", "PacmanPackage", "layouts")

    def __init__(self, noise=.1, timeout=30,
                 layoutFile=os.path.join(default_layout_dir, 'trickyClassic.lay'),
                 numGhostAgents=1000):
        """
        layoutFile:
            filename of the map file
        noise:
            with this probability pacman makes a random move instead of
            the one specified by the action
        """
        self.noise = noise
        # Specifies which Pacman world you want
        self.layoutFile = layoutFile
        # Puts the file in line stripped format
        layout_file_content = self._tryToLoad(self.layoutFile)
        self.layout = layout.Layout(layout_file_content)
        # Number of ghosts
        self.numGhostAgents = numGhostAgents
        # Initializes Pacman game
        self.game_state = pacman.GameState()
        self.game_rules = pacman.ClassicGameRules(timeout)
        self.layout_copy = deepcopy(self.layout)
        self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)
        self.num_total_food = len(self.layout_copy.food.asList())
        self.num_total_capsules = len(self.layout_copy.capsules)
        self._defaultSettings()
        self.restartGraphics = None
        self.timerswitch = False
        self.savedtimer = None
        self.gameDisplay = None
        self._set_statespace_limits()
        super(Pacman, self).__init__()

    def _set_statespace_limits(self):
        # Makes an array of limits for each dimension in the state vector.
        statespace_limits = []
        # adds pacman x, y locations
        statespace_limits.append([1, self.layout.width - 2])
        statespace_limits.append([1, self.layout.height - 2])
        # adds ghost x, y locations and scaredTimer (how long they can be
        # eaten)
        for ghost in self.game_state.data.agentStates[1:]:
            statespace_limits.append([1, self.layout.width - 2])
            statespace_limits.append([1, self.layout.height - 2])
            statespace_limits.append([0, self._max_scared_time])
        statespace_limits += [[0, 1]] * (
            self.num_total_food + self.num_total_capsules)
        self.statespace_limits = np.array(statespace_limits, dtype="float")

    def _set_state(self, s):
        """
        Takes a vector s and sets the internal game state used by the
        original pacman package.
        """
        # copies most recent state
        data = self.game_state.data
        agent_states = data.agentStates

        # set pacman position
        agent_states[0].configuration.pos = (s[0], s[1])

        # set ghost position
        num_ghosts = len(agent_states) - 1
        for i in range(1, num_ghosts + 1):
            part_s = s[(3 * i) - 1:3 * i + 2]
            agent_states[i].configuration.pos = (part_s[0], part_s[1])
            agent_states[i].scaredTimer = part_s[2]

        # set food and capsules locations
        s_food = s[(num_ghosts + 1) * 3:]
        x = 0
        y = 0
        i = 0
        data.capsules = []
        for char in str(self.layout_copy):
            if char == ".":
                data.food[x][y] = bool(s_food[i])
                i += 1
            elif char == "o":
                coord = (x, self.layout_copy.height - y)
                if s_food[i]:
                    data.capsules.append(coord)
                i += 1
            elif char == "\n":
                y += 1
                x = -1
            x += 1

    def _get_state(self):
        """
        get the internal game state represented as a numpy array
        """
        data = self.game_state.data
        agent_states = data.agentStates
        num_ghosts = len(agent_states) - 1
        s = np.zeros(
            2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules)
        # get pacman position
        s[:2] = agent_states[0].configuration.pos
        # get ghost info
        for i in range(num_ghosts):
            s[2 + i * 3: 2 + i * 3 + 2] = agent_states[i + 1].configuration.pos
            s[2 + i * 3 + 2] = agent_states[i + 1].scaredTimer
        # get food and capsules status
        i = 2 + num_ghosts * 3
        x = 0
        y = 0
        for char in str(self.layout_copy):
            if char == ".":
                s[i] = data.food[x][y]
                i += 1
            elif char == "\n":
                y += 1
                x = -1
            elif char == "o":
                coord = (x, self.layout_copy.height - y)
                if coord in data.capsules:
                    s[i] = 1.
                i += 1
            x += 1
        return s

    state = property(_get_state, _set_state)

    def showDomain(self, a, s=None):
        if s is not None:
            errStr = 'ERROR: In Pacman.py, attempted to pass a state (s)'\
                'to showDomain(); Pacman only supports internal states.'\
                'If you do pass a state parameter, ensure it is set to None.'
            raise Exception(errStr)
        s = self.game_state
        if self.gameDisplay is None:
            self.gameDisplay = graphicsDisplay.PacmanGraphics()
            self.gameDisplay.startGraphics(self)
            self.gameDisplay.drawStaticObjects(s.data)
            self.gameDisplay.drawAgentObjects(s.data)
        elif self._cleanup_graphics:
            self._cleanup_graphics = False
            self.gameDisplay.removeAllFood()
            self.gameDisplay.removeAllCapsules()
            self.gameDisplay.food = self.gameDisplay.drawFood(
                self.gameDisplay.layout.food)
            self.gameDisplay.capsules = self.gameDisplay.drawCapsules(
                self.gameDisplay.layout.capsules)
        # converts the s vector into a pacman gamestate instance and updates
        # the display every time pacman or a ghost moves.
        # s.data.food is the correct food matrix
        s.data.layout.food = s.data.food
        for agent in range(len(s.data.agentStates)):
            s.data._agentMoved = agent
            self.gameDisplay.update(s.data)
            s._foodEaten = None
            s._capsuleEaten = None
        # time.sleep(0.1) # Sleep for 0.1 sec

    def step(self, a):
        """
        Applies actions from outside the Pacman domain to the given state.
        Internal states accounted for along with scoring and terminal
        checking.
        Returns a tuple of form (reward, new state vector, terminal)
        """
        if self.random_state.random_sample() < self.noise:
            # Random Move
            a = self.random_state.choice(self.possibleActions())
        a = self.actions[a]
        next_state_p = self.game_state.generateSuccessor(0, a)
        next_state = next_state_p
        # pacman performs action "a" in current state object
        # pacman.PacmanRules.applyAction(self.game_state, a)
        # pacman.GhostRules.checkDeath(self.game_state, 0)
        # the ghosts move randomly
        for i in range(1, len(self.game_state.data.agentStates)):
            if next_state.isWin() or next_state.isLose():
                break
            ghostOptions = pacman.GhostRules.getLegalActions(next_state, i)
            # TODO: use domain random stream
            randomAction_ind = self.random_state.randint(len(ghostOptions))
            randomAction = ghostOptions[randomAction_ind]
            next_state = next_state.generateSuccessor(i, randomAction)

        # keep track of eaten stuff for graphics (original code assumes
        # graphics are updated after every agent's move)
        next_state.data._foodEaten = next_state_p.data._foodEaten
        next_state.data._capsuleEaten = next_state_p.data._capsuleEaten
        # scoring in pacman
        r = next_state.data.score - self.game_state.data.score
        self.game_state = next_state
        terminal = self.isTerminal()
        return r, self._get_state(), terminal, self.possibleActions()

    def s0(self):
        """
        re-initializes internal states when an episode starts, returns
        a s vector
        """
        self.game_state = pacman.GameState()
        self.game_rules = pacman.ClassicGameRules(timeout=30)
        self.layout_copy = deepcopy(self.layout)
        self.game = self.game_rules.newGame(
            self.layout_copy, pacman, self.ghosts, DummyGraphics(),
            self.beQuiet, catchExceptions=False)
        self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)
        self._cleanup_graphics = True
        return self.state, self.isTerminal(), self.possibleActions()

    def possibleActions(self):
        if self.isTerminal():
            # somewhat hacky, but should not matter anyway, maybe clean up in
            # the future
            return np.array([0])
        # makes an array of possible actions pacman can perform at any given
        # state
        possibleActions = []
        possibleMoves = pacman.GameState.getLegalActions(
            self.game_state, agentIndex=0)
        for a in possibleMoves:
            possibleActions.append(self.actions.index(a))
        return np.array(possibleActions)

    def isTerminal(self):
        """
        Checks whether the game should terminate at the given state.
        (Terminate for failure, ie eaten by ghost or out of time, and for
        success, all food on map eaten.)
        If game should terminate, returns the proper indication to step
        function. Accounts for scoring changes in terminal states.
        """
        return self.game_state.data._lose or self.game_state.data._win

    def _defaultSettings(self):
        self.ghostNum = 2
        self.ghosts = [ghostAgents.RandomGhost(
            game.Agent) for i in range(self.ghostNum)]
        self.beQuiet = False

    def _tryToLoad(self, fullname):
        # used in getLayout function
        f = open(fullname)
        grid = [line.strip() for line in f]
        f.close()
        return grid


class DummyGraphics(object):

    def initialize(self, *arg, **kwargs):
        pass

    def update(self, *arg, **kwargs):
        pass

    def finalize(self, *arg, **kwargs):
        pass
**ACTIONS:**", "CPU will permit; to slow things down so gameplay is", "a s vector \"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30)", "matrix s.data.layout.food = s.data.food for agent in range(len(s.data.agentStates)): s.data._agentMoved =", "1 def _get_state(self): \"\"\" get the internal game state represented", "[2] The x and y coordinates of pacman * [3", "The x and y coordinates of pacman * [3 *", "# pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly for i", "= self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate", "y locations and scaredTimer (how long they can be #", "= property(_get_state, _set_state) def showDomain(self, a, s=None): if s is", "def s0(self): \"\"\" re-initializes internal states when an episode starts,", "= len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer", "next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff for graphics", "self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman", "If game should terminate, returns the proper indication to step", "game should terminate, returns the proper indication to step function.", "for along with scoring and terminal checking. Returns a tuple", "adds pacman x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1,", "in str(self.layout_copy): if char == \".\": s[i] = data.food[x][y] i", "s is not None: errStr = 'ERROR: In Pacman.py, attempted", "\"\"\" _max_scared_time = 39 actions = [\"Stop\", \"North\", \"East\", \"South\",", "a): \"\"\" Applies actions from outside the Pacman domain to", "= ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) # keep track of", "def _defaultSettings(self): self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for", "self.noise = noise # Specifies which Pacman world you want", "time.sleep() in the showDomain() method. **REFERENCE:** This domain is an", "self.game_state if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data)", "char == \"\\n\": y += 1 x = -1 elif", "and updates # the display every time pacman or a", "move randomly for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin() or", "3 + 2] = agent_states[i + 1].configuration.pos s[2 + i", "- self.game_state.data.score self.game_state = next_state terminal = self.isTerminal() return r,", "locations s_food = s[(num_ghosts + 1) * 3:] x =", "# the display every time pacman or a ghost moves.", "s[(num_ghosts + 1) * 3:] x = 0 y =", "= data.food[x][y] i += 1 elif char == \"\\n\": y", "by (2 + 3*ng + nf + nc). **ACTIONS:** Move", "statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0, 1]] * ( self.num_total_food +", "None self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): #", "Random Move a = self.random_state.choice(self.possibleActions()) a = self.actions[a] next_state_p =", "time, and for success, all food on map eaten.) 
If", "capsules locations s_food = s[(num_ghosts + 1) * 3:] x", "for i in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname):", "series of dimensions: * [2] The x and y coordinates", "# set food and capsules locations s_food = s[(num_ghosts +", "on above, total dimensionality of state vector is map-dependent, and", "self.isTerminal(): # somewhat hacky, but should not matter anyway, maybe", "for failure, ie eaten by ghost or out of time,", "self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif", "the map file noise: with this probability pacman makes a", "has a series of dimensions: * [2] The x and", "i in range(1, num_ghosts + 1): part_s = s[(3 *", "state vector, terminal) \"\"\" if self.random_state.random_sample() < self.noise: # Random", "to slow things down so gameplay is actually visible, de-comment", "statespace_limits = [] # adds pacman x, y locations statespace_limits.append([1,", "[nc] binary variables for each capsule indicating if it is", "s state = property(_get_state, _set_state) def showDomain(self, a, s=None): if", "for 0.1 sec def step(self, a): \"\"\" Applies actions from", "internal game state represented as a numpy array \"\"\" data", "from .PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage import", "agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states) - 1 s =", "ghosts move randomly for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin()", "possibleActions(self): if self.isTerminal(): # somewhat hacky, but should not matter", "move instead the one specified by the action \"\"\" self.noise", "see the original package in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time", "1): part_s = s[(3 * i) - 1:3 * i]", "deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings()", "s_food[i]: data.capsules.append(coord) i += 1 elif char == \"\\n\": y", "state vector. statespace_limits = [] # adds pacman x, y", "game state used by the original pacman package. \"\"\" #", "[ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)] self.beQuiet = False def", "can perform at any given # state possibleActions = []", "the x and y coordinates as well as the scare", "\"\"\" if self.random_state.random_sample() < self.noise: # Random Move a =", "performs action \"a\" in current state object # pacman.PacmanRules.applyAction(self.game_state, a)", "form (reward, new state vector, terminal) \"\"\" if self.random_state.random_sample() <", "method. **REFERENCE:** This domain is an RLPy wrapper for the", "the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout", "# Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout)", "the board or not * [nc] binary variables for each", "makes an array of possible actions pacman can perform at", "whether the game should terminate at the given state. (Terminate", "moves. 
# s.data.food is the correct food matrix s.data.layout.food =", "None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics", "info. .. note:: The visualization runs as fast as your", "Specifies which Pacman world you want self.layoutFile = layoutFile #", "= self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) #", "state (s)'\\ 'to showDomain(); Pacman only supports internal states.'\\ 'If", "from .PacmanPackage import graphicsDisplay import numpy as np from copy", "self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\") def _set_state(self, s): \"\"\" Takes", "set to None.' raise Exception(errStr) s = self.game_state if self.gameDisplay", "for each capsule indicating if it is still on the", "grid class DummyGraphics(object): def initialize(self, *arg, **kwargs): pass def update(self,", "def showDomain(self, a, s=None): if s is not None: errStr", "self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs action \"a\"", "most recent state data = self.game_state.data agent_states = data.agentStates #", "char == \".\": data.food[x][y] = bool(s_food[i]) i += 1 elif", "# makes an array of possible actions pacman can perform", "of dimensions: * [2] The x and y coordinates of", "this probability pacman makes a random move instead the one", "[] for char in str(self.layout_copy): if char == \".\": data.food[x][y]", "\"o\": coord = (x, self.layout_copy.height - y) if coord in", "time pacman or a ghost moves. # s.data.food is the", "in data.capsules: s[i] = 1. i += 1 x +=", "clean up in # the future return np.array([0]) # makes", "state = property(_get_state, _set_state) def showDomain(self, a, s=None): if s", "acts as a wrapper for the Pacman implementation from the", "down so gameplay is actually visible, de-comment time.sleep() in the", "* i) - 1:3 * i] agent_states[i].configuration.pos = (part_s[0], part_s[1])", "= None # time.sleep(0.1) # Sleep for 0.1 sec def", "s[i] = data.food[x][y] i += 1 elif char == \"\\n\":", "array of possible actions pacman can perform at any given", "\"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain):", "- 1 s = np.zeros( 2 + num_ghosts * 3", "or not *nf* and *nc* are map-dependent, and *ng* can", "self.layout_copy = deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(),", "numpy as np from copy import deepcopy import os import", "actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num = 5", "below for more info. .. 
note:: The visualization runs as", "terminal) \"\"\" if self.random_state.random_sample() < self.noise: # Random Move a", "game.Agent) for i in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self,", "3 + 2] = agent_states[i + 1].scaredTimer # get food", "class DummyGraphics(object): def initialize(self, *arg, **kwargs): pass def update(self, *arg,", "+ i * 3: 2 + i * 3 +", "course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped)", "and y coordinates as well as the scare time of", "stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number", "y) if s_food[i]: data.capsules.append(coord) i += 1 elif char ==", "the original package in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time =", "self.beQuiet = False def _tryToLoad(self, fullname): # used in getLayout", "self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules", "39 actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num =", "i * 3: 2 + i * 3 + 2]", "self.possibleActions() def possibleActions(self): if self.isTerminal(): # somewhat hacky, but should", "* 3: 2 + i * 3 + 2] =", "for graphics (original code assumes # graphics are updated after", "\"\"\" re-initializes internal states when an episode starts, returns a", "\"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain, which acts as a", "5 episodeCap = 1000 #: location of layouts shipped with", "self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules()", "= (s[0], s[1]) # set ghost position num_ghosts = len(agent_states)", "time\" is how long the ghost remains scared after consuming", "display every time pacman or a ghost moves. 
# s.data.food", "TODO: use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction =", "= 0 data.capsules = [] for char in str(self.layout_copy): if", "False self.savedtimer = None self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__()", "of pacman * [3 * ng] the x and y", "attempted to pass a state (s)'\\ 'to showDomain(); Pacman only", "*arg, **kwargs): pass def update(self, *arg, **kwargs): pass def finalize(self,", "internal states when an episode starts, returns a s vector", "= self.game_state.data agent_states = data.agentStates # set pacman position agent_states.configuration.pos", "_defaultSettings(self): self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i", "np.zeros( 2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules)", "rlpy default_layout_dir = os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self,", "self.timerswitch = False self.savedtimer = None self.gameDisplay = None self._set_statespace_limits()", "# adds ghost x, y locations and scaredTimer (how long", "'to showDomain(); Pacman only supports internal states.'\\ 'If you do", "\"\"\" # copies most recent state data = self.game_state.data agent_states", "self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food", "None.' raise Exception(errStr) s = self.game_state if self.gameDisplay is None:", "= -1 elif char == \"o\": coord = (x, self.layout_copy.height", "r = next_state.data.score - self.game_state.data.score self.game_state = next_state terminal =", "scoring in pacman r = next_state.data.score - self.game_state.data.score self.game_state =", "for the Pacman implementation from the BerkeleyX/CS188.1x course project 3.", "randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction)", "self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy,", "and *ng* can be set as a parameter. Based on", "self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector", "in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time = 39 actions =", "wrapper for the Pacman implementation from the BerkeleyX/CS188.1x course project", "ensure it is set to None.' raise Exception(errStr) s =", "ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) # keep track of eaten", "1].scaredTimer # get food and capsules status i = 2", "s[2 + i * 3 + 2] = agent_states[i +", "BerkeleyX/CS188.1x course project 3. 
**STATE:** The state vector has a", "can be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width", "eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1,", "set ghost position num_ghosts = len(agent_states) - 1 for i", "terminal = self.isTerminal() return r, self._get_state(), terminal, self.possibleActions() def s0(self):", "# pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move", "= self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p #", "\"\"\" layoutFile: filename of the map file noise: with this", "in range(1, num_ghosts + 1): part_s = s[(3 * i)", "well as the scare time of each ghost (\"scare time\"", "self.random_state.choice(self.possibleActions()) a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state =", "import os import time __copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\"", "+= 1 return s state = property(_get_state, _set_state) def showDomain(self,", "data = self.game_state.data agent_states = data.agentStates # set pacman position", "self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in", "every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten #", "2]) statespace_limits.append([1, self.layout.height - 2]) # adds ghost x, y", "self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False", "= 0 y = 0 for char in str(self.layout_copy): if", "will permit; to slow things down so gameplay is actually", "and scaredTimer (how long they can be # eaten) for", "if self.isTerminal(): # somewhat hacky, but should not matter anyway,", "import Domain from .PacmanPackage import layout, pacman, game, ghostAgents from", "accounted for along with scoring and terminal checking. 
Returns a", "self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food =", "as the scare time of each ghost (\"scare time\" is", "3 x = 0 y = 0 for char in", "in range(1, len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose(): break ghostOptions =", "from copy import deepcopy import os import time __copyright__ =", "pacman position agent_states.configuration.pos = (s[0], s[1]) # set ghost position", "an episode starts, returns a s vector \"\"\" self.game_state =", "original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the", "of each ghost (\"scare time\" is how long the ghost", "1 x = -1 elif char == \"o\": coord =", "s vector in pacman gamestate instance and updates # the", "if it is still on the board or not *nf*", "i += 1 elif char == \"\\n\": y += 1", "(s)'\\ 'to showDomain(); Pacman only supports internal states.'\\ 'If you", "3:] x = 0 y = 0 i = 0", "pacman gamestate instance and updates # the display every time", "(x, self.layout_copy.height - y) if coord in data.capsules: s[i] =", "for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose(): break", "= 2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)]", "x and y coordinates as well as the scare time", "self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts", ".. note:: The visualization runs as fast as your CPU", "1) * 3:] x = 0 y = 0 i", "= noise # Specifies which Pacman world you want self.layoutFile", "pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy as", "the one specified by the action \"\"\" self.noise = noise", "import graphicsDisplay import numpy as np from copy import deepcopy", "2 + num_ghosts * 3 x = 0 y =", "data.food[x][y] = bool(s_food[i]) i += 1 elif char == \"o\":", "Pacman domain, which acts as a wrapper for the Pacman", "= -1 x += 1 def _get_state(self): \"\"\" get the", "self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(), self.possibleActions() def possibleActions(self):", "# converts s vector in pacman gamestate instance and updates", "package. \"\"\" # copies most recent state data = self.game_state.data", "numGhostAgents=1000): \"\"\" layoutFile: filename of the map file noise: with", "so gameplay is actually visible, de-comment time.sleep() in the showDomain()", "copy import deepcopy import os import time __copyright__ = \"Copyright", "# TODO: use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction", "range(1, num_ghosts + 1): part_s = s[(3 * i) -", "converts s vector in pacman gamestate instance and updates #", "is still on the board or not *nf* and *nc*", "(2 + 3*ng + nf + nc). 
**ACTIONS:** Move Pacman", "given # state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state,", "self.possibleActions() def s0(self): \"\"\" re-initializes internal states when an episode", "os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join(", "anyway, maybe clean up in # the future return np.array([0])", "episodeCap = 1000 #: location of layouts shipped with rlpy", "range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname): # used in", "original pacman package. \"\"\" # copies most recent state data", "1 elif char == \"o\": coord = (x, self.layout_copy.height -", "left, right, stay] **REWARD:** See the Berkeley project website below", "= os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30,", "of the domain see the original package in the `Domains/PacmanPackage`", "terminal states. \"\"\" return self.game_state.data._lose or self.game_state.data._win def _defaultSettings(self): self.ghostNum", "s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten = None", "pass a state parameter, ensure it is set to None.'", "self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) #", "#: location of layouts shipped with rlpy default_layout_dir = os.path.join(", "pacman package. \"\"\" # copies most recent state data =", "for success, all food on map eaten.) If game should", "# used in getLayout function f = open(fullname) grid =", "self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food =", "__license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\"", "s.data.food for agent in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten", "*ng* can be set as a parameter. Based on above,", "folder. \"\"\" _max_scared_time = 39 actions = [\"Stop\", \"North\", \"East\",", "agent_states[i + 1].scaredTimer # get food and capsules status i", "raise Exception(errStr) s = self.game_state if self.gameDisplay is None: self.gameDisplay", "graphicsDisplay import numpy as np from copy import deepcopy import", "possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a", "statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) # adds", "char in str(self.layout_copy): if char == \".\": data.food[x][y] = bool(s_food[i])", "* 3 + self.num_total_food + self.num_total_capsules) # get pacman position", "self.numGhostAgents = numGhostAgents # Intitializes Pacman game self.game_state = pacman.GameState()", "the state vector. statespace_limits = [] # adds pacman x,", "pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy, pacman, self.ghosts,", "re-initializes internal states when an episode starts, returns a s", "sets the internal game state used by the original pacman", "as a numpy array \"\"\" data = self.game_state.data agent_states =", "self.layout_copy.height - y) if coord in data.capsules: s[i] = 1.", "y) if coord in data.capsules: s[i] = 1. 
i +=", "with rlpy default_layout_dir = os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def", "recent state data = self.game_state.data agent_states = data.agentStates # set", "for char in str(self.layout_copy): if char == \".\": s[i] =", "still on the board or not *nf* and *nc* are", "of form (reward, new state vector, terminal) \"\"\" if self.random_state.random_sample()", "self.restartGraphics = None self.timerswitch = False self.savedtimer = None self.gameDisplay", "x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height -", "self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman", "pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions)", "ghost info for i in range(num_ghosts): s[2 + i *", "1 for i in range(1, num_ghosts + 1): part_s =", "s=None): if s is not None: errStr = 'ERROR: In", "(x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i += 1", "self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\") def _set_state(self, s):", "2] = agent_states[i + 1].scaredTimer # get food and capsules", "def step(self, a): \"\"\" Applies actions from outside the Pacman", "pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game = self.game_rules.newGame(", "2 + i * 3 + 2] = agent_states[i +", "y coordinates as well as the scare time of each", "self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(), self.possibleActions() def", "data.capsules: s[i] = 1. i += 1 x += 1", "checking. Returns a tuple of form (reward, new state vector,", "\"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout)", "\"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states)", "self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate instance and", "<filename>rlpy/Domains/Pacman.py \"\"\"Pacman game domain.\"\"\" from rlpy.Tools import __rlpy_location__ from .Domain", "+ num_ghosts * 3 x = 0 y = 0", "an RLPy wrapper for the implementation from the `BerkeleyX/CS188.1x course", "\"\"\" get the internal game state represented as a numpy", "import ipdb; ipdb.set_trace() # get ghost info for i in", "food is still on the board or not * [nc]", "\"\"\" Applies actions from outside the Pacman domain to the", "array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts =", "game should terminate at the given state. 
(Terminate for failure,", "terminal, self.possibleActions() def s0(self): \"\"\" re-initializes internal states when an", "statespace_limits += [[0, 1]] * ( self.num_total_food + self.num_total_capsules) self.statespace_limits", "1:3 * i] agent_states[i].configuration.pos = (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2]", "open(fullname) grid = [line.strip() for line in f] f.close() return", "* 3:] x = 0 y = 0 i =", "self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules =", "# scoring in pacman r = next_state.data.score - self.game_state.data.score self.game_state", "from the BerkeleyX/CS188.1x course project 3. **STATE:** The state vector", "in str(self.layout_copy): if char == \".\": data.food[x][y] = bool(s_food[i]) i", "terminate at the given state. (Terminate for failure, ie eaten", "# state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0)", "`BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code", "= 1000 #: location of layouts shipped with rlpy default_layout_dir", "of state vector is map-dependent, and given by (2 +", "an array of possible actions pacman can perform at any", "position num_ghosts = len(agent_states) - 1 for i in range(1,", "correct food matrix s.data.layout.food = s.data.food for agent in range(len(s.data.agentStates)):", "def isTerminal(self): \"\"\" Checks whether the game should terminate at", "2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0,", "is set to None.' raise Exception(errStr) s = self.game_state if", "if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data)", "= 0 i = 0 data.capsules = [] for char", "pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules", "instead the one specified by the action \"\"\" self.noise =", "self.num_total_capsules) # get pacman position s[:2] = agent_states[0].configuration.pos # import", "coord in data.capsules: s[i] = 1. i += 1 x", "# adds pacman x, y locations statespace_limits.append([1, self.layout.width - 2])", "specified by the action \"\"\" self.noise = noise # Specifies", "episode starts, returns a s vector \"\"\" self.game_state = pacman.GameState()", "2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)] self.beQuiet", "and *nc* are map-dependent, and *ng* can be set as", "long they can be # eaten) for ghost in self.game_state.data.agentStates[1:]:", "0.1 sec def step(self, a): \"\"\" Applies actions from outside", "internal states.'\\ 'If you do pass a state parameter, ensure", "y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2])", "[\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__", "as well as the scare time of each ghost (\"scare", "the game should terminate at the given state. (Terminate for", "state. 
Internal states accounted for along with scoring and terminal", "a state (s)'\\ 'to showDomain(); Pacman only supports internal states.'\\", "- y) if coord in data.capsules: s[i] = 1. i", "Domain from .PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage", "agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten # scoring", "np.array([0]) # makes an array of possible actions pacman can", "and capsules status i = 2 + num_ghosts * 3", "or out of time, and for success, all food on", "\"\"\" self.noise = noise # Specifies which Pacman world you", "+= [[0, 1]] * ( self.num_total_food + self.num_total_capsules) self.statespace_limits =", "the ghost remains scared after consuming a capsule.) * [nf]", "self.isTerminal(), self.possibleActions() def possibleActions(self): if self.isTerminal(): # somewhat hacky, but", "should terminate at the given state. (Terminate for failure, ie", "after consuming a capsule.) * [nf] binary variables indicating if", "in f] f.close() return grid class DummyGraphics(object): def initialize(self, *arg,", "the Pacman implementation from the BerkeleyX/CS188.1x course project 3. **STATE:**", "terminate, returns the proper indication to step function. Accounts for", "in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten", "3: 2 + i * 3 + 2] = agent_states[i", "__copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\",", "food and capsules locations s_food = s[(num_ghosts + 1) *", "they can be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1,", "x = 0 y = 0 for char in str(self.layout_copy):", "* [nf] binary variables indicating if a food is still", "self.savedtimer = None self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__() def", "instance and updates # the display every time pacman or", "to the given state. 
Internal states accounted for along with", "def _tryToLoad(self, fullname): # used in getLayout function f =", "self.num_total_food + self.num_total_capsules) # get pacman position s[:2] = agent_states[0].configuration.pos", "returns a s vector \"\"\" self.game_state = pacman.GameState() self.game_rules =", "position s[:2] = agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() # get", "the correct food matrix s.data.layout.food = s.data.food for agent in", "import numpy as np from copy import deepcopy import os", "< self.noise: # Random Move a = self.random_state.choice(self.possibleActions()) a =", "= [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\"", "not None: errStr = 'ERROR: In Pacman.py, attempted to pass", "state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for", "[nf] binary variables indicating if a food is still on", "world you want self.layoutFile = layoutFile # Puts the file", "\"o\": coord = (x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord)", "= next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff for", "= [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in", "None # time.sleep(0.1) # Sleep for 0.1 sec def step(self,", "be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width -", "self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate instance", "* [3 * ng] the x and y coordinates as", "original package in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time = 39", "pacman x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height", "the Berkeley project website below for more info. .. note::", "dimensionality of state vector is map-dependent, and given by (2", "None s._capsuleEaten = None # time.sleep(0.1) # Sleep for 0.1", "= None self.timerswitch = False self.savedtimer = None self.gameDisplay =", "states when an episode starts, returns a s vector \"\"\"", "+ num_ghosts * 3 + self.num_total_food + self.num_total_capsules) # get", "__rlpy_location__ from .Domain import Domain from .PacmanPackage import layout, pacman,", "is the correct food matrix s.data.layout.food = s.data.food for agent", "can be set as a parameter. Based on above, total", "s vector \"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy", "= open(fullname) grid = [line.strip() for line in f] f.close()", "_set_statespace_limits(self): # Makes an array of limits for each dimension", "= agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() # get ghost info", "return self.state, self.isTerminal(), self.possibleActions() def possibleActions(self): if self.isTerminal(): # somewhat", "next_state_p # pacman performs action \"a\" in current state object", "self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics =", "pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly for i in", "== \".\": s[i] = data.food[x][y] i += 1 elif char", "== \".\": data.food[x][y] = bool(s_food[i]) i += 1 elif char", "ghost remains scared after consuming a capsule.) 
* [nf] binary", "= (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and", "if a food is still on the board or not", "x += 1 return s state = property(_get_state, _set_state) def", "[3 * ng] the x and y coordinates as well", "See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details", "s._foodEaten = None s._capsuleEaten = None # time.sleep(0.1) # Sleep", "a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): \"\"\" Checks", "coord = (x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i", "y coordinates of pacman * [3 * ng] the x", "<https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more", "self.game_state.data._win def _defaultSettings(self): self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost( game.Agent)", "not *nf* and *nc* are map-dependent, and *ng* can be", "be set as a parameter. Based on above, total dimensionality", "Move a = self.random_state.choice(self.possibleActions()) a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0,", "for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height", "a series of dimensions: * [2] The x and y", "s = self.game_state if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics()", "project 3. **STATE:** The state vector has a series of", "= pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream randomAction_ind", "Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy", "self.game = self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy,", "and given by (2 + 3*ng + nf + nc).", "state vector has a series of dimensions: * [2] The", "set as a parameter. Based on above, total dimensionality of", "y += 1 x = -1 x += 1 def", "the ghosts move randomly for i in range(1, len(self.game_state.data.agentStates)): if", "vector \"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy =", "str(self.layout_copy): if char == \".\": s[i] = data.food[x][y] i +=", "position agent_states.configuration.pos = (s[0], s[1]) # set ghost position num_ghosts", "is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics:", "\"\\n\": y += 1 x = -1 x += 1", "next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs", "of eaten stuff for graphics (original code assumes # graphics", "gameplay is actually visible, de-comment time.sleep() in the showDomain() method.", "0 data.capsules = [] for char in str(self.layout_copy): if char", "terminal checking. 
Returns a tuple of form (reward, new state", "state represented as a numpy array \"\"\" data = self.game_state.data", "graphics (original code assumes # graphics are updated after every", "which acts as a wrapper for the Pacman implementation from", "Pacman implementation from the BerkeleyX/CS188.1x course project 3. **STATE:** The", "state. (Terminate for failure, ie eaten by ghost or out", "step function. Accounts for scoring changes in terminal states. \"\"\"", "__rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir,", "is map-dependent, and given by (2 + 3*ng + nf", "game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout)", "state parameter, ensure it is set to None.' raise Exception(errStr)", "at the given state. (Terminate for failure, ie eaten by", "code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the domain see", "+ 1) * 3:] x = 0 y = 0", "in current state object # pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state, 0)", "a random move instead the one specified by the action", "_tryToLoad(self, fullname): # used in getLayout function f = open(fullname)", "not matter anyway, maybe clean up in # the future", "by the action \"\"\" self.noise = noise # Specifies which", "in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2])", "details of the domain see the original package in the", "grid = [line.strip() for line in f] f.close() return grid", "= layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents = numGhostAgents #", "to None.' raise Exception(errStr) s = self.game_state if self.gameDisplay is", "self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i in", "self.game_state.data agent_states = data.agentStates # set pacman position agent_states.configuration.pos =", "with this probability pacman makes a random move instead the", "self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy,", "__init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile: filename", "probability pacman makes a random move instead the one specified", "map-dependent, and given by (2 + 3*ng + nf +", "on the board or not *nf* and *nc* are map-dependent,", "step(self, a): \"\"\" Applies actions from outside the Pacman domain", "as fast as your CPU will permit; to slow things", "as np from copy import deepcopy import os import time", "# Specifies which Pacman world you want self.layoutFile = layoutFile", "vector has a series of dimensions: * [2] The x", "line in f] f.close() return grid class DummyGraphics(object): def initialize(self,", "on map eaten.) If game should terminate, returns the proper", "3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain, which", "[\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num = 5 episodeCap =", "num_ghosts * 3 + self.num_total_food + self.num_total_capsules) # get pacman", "function f = open(fullname) grid = [line.strip() for line in", "project website below for more info. .. 
note:: The visualization", "state data = self.game_state.data agent_states = data.agentStates # set pacman", "ipdb.set_trace() # get ghost info for i in range(num_ghosts): s[2", "map file noise: with this probability pacman makes a random", "self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return", "This domain is an RLPy wrapper for the implementation from", "part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules locations", "i += 1 elif char == \"o\": coord = (x,", "game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy as np", "= 0 for char in str(self.layout_copy): if char == \".\":", "break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random", "= self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)", "2] = agent_states[i + 1].configuration.pos s[2 + i * 3", "elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood(", "data.capsules = [] for char in str(self.layout_copy): if char ==", "\"Copyright 2013, RLPy http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\",", "get pacman position s[:2] = agent_states[0].configuration.pos # import ipdb; ipdb.set_trace()", "\"\"\" Checks whether the game should terminate at the given", "+ 2] = agent_states[i + 1].scaredTimer # get food and", "proper indication to step function. Accounts for scoring changes in", "2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules) #", "makes a random move instead the one specified by the", "str(self.layout_copy): if char == \".\": data.food[x][y] = bool(s_food[i]) i +=", "pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream randomAction_ind =", "or a ghost moves. 
# s.data.food is the correct food", "updates # the display every time pacman or a ghost", "layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts", "randomAction) # keep track of eaten stuff for graphics (original", "for i in range(num_ghosts): s[2 + i * 3: 2", "stay] **REWARD:** See the Berkeley project website below for more", "'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile: filename of the map file noise:", "but should not matter anyway, maybe clean up in #", "= pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game =", "scare time of each ghost (\"scare time\" is how long", "on the board or not * [nc] binary variables for", "num_ghosts = len(agent_states) - 1 s = np.zeros( 2 +", "self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game", "set food and capsules locations s_food = s[(num_ghosts + 1)", "[] # adds pacman x, y locations statespace_limits.append([1, self.layout.width -", "= pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)", "graphics are updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten", "in pacman r = next_state.data.score - self.game_state.data.score self.game_state = next_state", "for scoring changes in terminal states. \"\"\" return self.game_state.data._lose or", "Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy =", "(Terminate for failure, ie eaten by ghost or out of", "= agent_states[i + 1].configuration.pos s[2 + i * 3 +", "def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile:", "of ghosts self.numGhostAgents = numGhostAgents # Intitializes Pacman game self.game_state", "ghost x, y locations and scaredTimer (how long they can", "s = np.zeros( 2 + num_ghosts * 3 + self.num_total_food", "ghost position num_ghosts = len(agent_states) - 1 for i in", "random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state =", "\"\"\" Takes a vector s and sets the internal game", "food matrix s.data.layout.food = s.data.food for agent in range(len(s.data.agentStates)): s.data._agentMoved", "a state parameter, ensure it is set to None.' raise", "\"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class", "course project 3. **STATE:** The state vector has a series", "object # pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts", "binary variables indicating if a food is still on the", "y += 1 x = -1 elif char == \"o\":", "def _set_state(self, s): \"\"\" Takes a vector s and sets", ".PacmanPackage import graphicsDisplay import numpy as np from copy import", "def initialize(self, *arg, **kwargs): pass def update(self, *arg, **kwargs): pass", "== \"\\n\": y += 1 x = -1 x +=", "+ 3*ng + nf + nc). **ACTIONS:** Move Pacman [up,", "for line in f] f.close() return grid class DummyGraphics(object): def", "limits for each dimension in the state vector. statespace_limits =", "1 x += 1 return s state = property(_get_state, _set_state)", "and terminal checking. 
Returns a tuple of form (reward, new", "used in getLayout function f = open(fullname) grid = [line.strip()", "are map-dependent, and *ng* can be set as a parameter.", "when an episode starts, returns a s vector \"\"\" self.game_state", "= s[(num_ghosts + 1) * 3:] x = 0 y", "= self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s", "self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics", "= [] # adds pacman x, y locations statespace_limits.append([1, self.layout.width", "return np.array(possibleActions) def isTerminal(self): \"\"\" Checks whether the game should", "0) # the ghosts move randomly for i in range(1,", "if self.random_state.random_sample() < self.noise: # Random Move a = self.random_state.choice(self.possibleActions())", "i = 0 data.capsules = [] for char in str(self.layout_copy):", "for the implementation from the `BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_", "up in # the future return np.array([0]) # makes an", "* ng] the x and y coordinates as well as", "next_state = next_state_p # pacman performs action \"a\" in current", "agent_states.configuration.pos = (s[0], s[1]) # set ghost position num_ghosts =", "# graphics are updated after every agent's move) next_state.data._foodEaten =", "3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For", "2]) # adds ghost x, y locations and scaredTimer (how", "0 i = 0 data.capsules = [] for char in", "time of each ghost (\"scare time\" is how long the", "package in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time = 39 actions", "along with scoring and terminal checking. Returns a tuple of", "Makes an array of limits for each dimension in the", "pass a state (s)'\\ 'to showDomain(); Pacman only supports internal", "pacman r = next_state.data.score - self.game_state.data.score self.game_state = next_state terminal", "only supports internal states.'\\ 'If you do pass a state", "success, all food on map eaten.) 
If game should terminate,", "x = -1 elif char == \"o\": coord = (x,", "= 'ERROR: In Pacman.py, attempted to pass a state (s)'\\", "(part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules", "board or not * [nc] binary variables for each capsule", "1].configuration.pos s[2 + i * 3 + 2] = agent_states[i", "ghost or out of time, and for success, all food", "Sleep for 0.1 sec def step(self, a): \"\"\" Applies actions", "self.state, self.isTerminal(), self.possibleActions() def possibleActions(self): if self.isTerminal(): # somewhat hacky,", "0 for char in str(self.layout_copy): if char == \".\": s[i]", "range(num_ghosts): s[2 + i * 3: 2 + i *", "1 elif char == \"\\n\": y += 1 x =", "the internal game state represented as a numpy array \"\"\"", "self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) # adds ghost", "you want self.layoutFile = layoutFile # Puts the file in", "part_s[2] # set food and capsules locations s_food = s[(num_ghosts", "coordinates as well as the scare time of each ghost", "layoutFile: filename of the map file noise: with this probability", "states accounted for along with scoring and terminal checking. Returns", "\"a\" in current state object # pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state,", "\"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'),", "status i = 2 + num_ghosts * 3 x =", "is actually visible, de-comment time.sleep() in the showDomain() method. **REFERENCE:**", "self.layout = layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents = numGhostAgents", "in range(num_ghosts): s[2 + i * 3: 2 + i", "http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ =", "get ghost info for i in range(num_ghosts): s[2 + i", "a, s=None): if s is not None: errStr = 'ERROR:", "numGhostAgents # Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules =", "= 2 + num_ghosts * 3 x = 0 y", "in terminal states. \"\"\" return self.game_state.data._lose or self.game_state.data._win def _defaultSettings(self):", "project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_", "x = 0 y = 0 i = 0 data.capsules", "domain, which acts as a wrapper for the Pacman implementation", "i in range(num_ghosts): s[2 + i * 3: 2 +", "food on map eaten.) If game should terminate, returns the", "= 5 episodeCap = 1000 #: location of layouts shipped", "= False def _tryToLoad(self, fullname): # used in getLayout function", "self._max_scared_time]) statespace_limits += [[0, 1]] * ( self.num_total_food + self.num_total_capsules)", "the `Domains/PacmanPackage` folder. 
\"\"\" _max_scared_time = 39 actions = [\"Stop\",", "f.close() return grid class DummyGraphics(object): def initialize(self, *arg, **kwargs): pass", "actions from outside the Pacman domain to the given state.", "ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream", "= bool(s_food[i]) i += 1 elif char == \"o\": coord", "f] f.close() return grid class DummyGraphics(object): def initialize(self, *arg, **kwargs):", "= np.zeros( 2 + num_ghosts * 3 + self.num_total_food +", "randomly for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose():", "+= 1 def _get_state(self): \"\"\" get the internal game state", "things down so gameplay is actually visible, de-comment time.sleep() in", "as your CPU will permit; to slow things down so", "next_state.data.score - self.game_state.data.score self.game_state = next_state terminal = self.isTerminal() return", "showDomain() method. **REFERENCE:** This domain is an RLPy wrapper for", "char in str(self.layout_copy): if char == \".\": s[i] = data.food[x][y]", "bool(s_food[i]) i += 1 elif char == \"o\": coord =", "import time __copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\" __credits__ =", "given state. Internal states accounted for along with scoring and", "# import ipdb; ipdb.set_trace() # get ghost info for i", "def possibleActions(self): if self.isTerminal(): # somewhat hacky, but should not", "ghost (\"scare time\" is how long the ghost remains scared", "stuff for graphics (original code assumes # graphics are updated", "from rlpy.Tools import __rlpy_location__ from .Domain import Domain from .PacmanPackage", "x and y coordinates of pacman * [3 * ng]", "# get food and capsules status i = 2 +", "+= 1 elif char == \"o\": coord = (x, self.layout_copy.height", "one specified by the action \"\"\" self.noise = noise #", "changes in terminal states. \"\"\" return self.game_state.data._lose or self.game_state.data._win def", "Number of ghosts self.numGhostAgents = numGhostAgents # Intitializes Pacman game", "\"\"\"Pacman game domain.\"\"\" from rlpy.Tools import __rlpy_location__ from .Domain import", "+ 2] = agent_states[i + 1].configuration.pos s[2 + i *", "== \"o\": coord = (x, self.layout_copy.height - y) if coord", "line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) #", "for char in str(self.layout_copy): if char == \".\": data.food[x][y] =", "0 y = 0 for char in str(self.layout_copy): if char", "= layoutFile # Puts the file in line stripped format", "by the original pacman package. \"\"\" # copies most recent", "self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0, 1]] *", "3. **STATE:** The state vector has a series of dimensions:", "down, left, right, stay] **REWARD:** See the Berkeley project website", "for more info. .. 
note:: The visualization runs as fast", "statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0, 1]]", "r, self._get_state(), terminal, self.possibleActions() def s0(self): \"\"\" re-initializes internal states", "dtype=\"float\") def _set_state(self, s): \"\"\" Takes a vector s and", "\"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain,", "for i in range(1, num_ghosts + 1): part_s = s[(3", "noise: with this probability pacman makes a random move instead", "[] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in possibleMoves:", "* i] agent_states[i].configuration.pos = (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] #", "num_ghosts * 3 x = 0 y = 0 for", "= self.random_state.choice(self.possibleActions()) a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state", "stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i,", "+= 1 elif char == \"\\n\": y += 1 x", "domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state", "the board or not *nf* and *nc* are map-dependent, and", "indicating if a food is still on the board or", "= 39 actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num", "\"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000):", "variables indicating if a food is still on the board", "visualization runs as fast as your CPU will permit; to", "agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten = None # time.sleep(0.1)", "graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood()", "+ i * 3 + 2] = agent_states[i + 1].scaredTimer", "fullname): # used in getLayout function f = open(fullname) grid", "self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes an array of", "+ self.num_total_food + self.num_total_capsules) # get pacman position s[:2] =", "s[1]) # set ghost position num_ghosts = len(agent_states) - 1", "filename of the map file noise: with this probability pacman", "internal game state used by the original pacman package. 
\"\"\"", "use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind]", "def _set_statespace_limits(self): # Makes an array of limits for each", "vector in pacman gamestate instance and updates # the display", "_set_state(self, s): \"\"\" Takes a vector s and sets the", "numpy array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts", "# time.sleep(0.1) # Sleep for 0.1 sec def step(self, a):", "(original code assumes # graphics are updated after every agent's", "[line.strip() for line in f] f.close() return grid class DummyGraphics(object):", "pass def update(self, *arg, **kwargs): pass def finalize(self, *arg, **kwargs):", "`source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the domain", "*nc* are map-dependent, and *ng* can be set as a", "_set_state) def showDomain(self, a, s=None): if s is not None:", "+ self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\") def _set_state(self, s): \"\"\"", "self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents =", "agentIndex=0) for a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self):", "the action \"\"\" self.noise = noise # Specifies which Pacman", "after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten", "\"South\", \"West\"] actions_num = 5 episodeCap = 1000 #: location", "noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile: filename of", "__credits__ = [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD", "s_food = s[(num_ghosts + 1) * 3:] x = 0", "np from copy import deepcopy import os import time __copyright__", "(s[0], s[1]) # set ghost position num_ghosts = len(agent_states) -", "= self.game_state.data.agentStates num_ghosts = len(agent_states) - 1 s = np.zeros(", "\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ =", "actions_num = 5 episodeCap = 1000 #: location of layouts", "capsule.) * [nf] binary variables indicating if a food is", "action \"\"\" self.noise = noise # Specifies which Pacman world", "np.array(statespace_limits, dtype=\"float\") def _set_state(self, s): \"\"\" Takes a vector s", "if char == \".\": data.food[x][y] = bool(s_food[i]) i += 1", "info for i in range(num_ghosts): s[2 + i * 3:", "y = 0 for char in str(self.layout_copy): if char ==", "char == \"\\n\": y += 1 x = -1 x", "a = self.random_state.choice(self.possibleActions()) a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a)", "+ 1].scaredTimer # get food and capsules status i =", "= 1. 
i += 1 x += 1 return s", "Checks whether the game should terminate at the given state.", "possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): \"\"\" Checks whether the", "move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten # scoring in", "binary variables for each capsule indicating if it is still", "right, stay] **REWARD:** See the Berkeley project website below for", "i * 3 + 2] = agent_states[i + 1].scaredTimer #", "in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname): # used", "pacman performs action \"a\" in current state object # pacman.PacmanRules.applyAction(self.game_state,", "= deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet,", "= deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules)", "( self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\") def _set_state(self,", "+= 1 x = -1 x += 1 def _get_state(self):", "part_s = s[(3 * i) - 1:3 * i] agent_states[i].configuration.pos", "scaredTimer (how long they can be # eaten) for ghost", "= agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten = None #", "shipped with rlpy default_layout_dir = os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\")", "the showDomain() method. **REFERENCE:** This domain is an RLPy wrapper", "y = 0 i = 0 data.capsules = [] for", "self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None", "each capsule indicating if it is still on the board", "self.isTerminal() return r, self._get_state(), terminal, self.possibleActions() def s0(self): \"\"\" re-initializes", "vector is map-dependent, and given by (2 + 3*ng +", "= \"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain, which acts as", "s): \"\"\" Takes a vector s and sets the internal", "data.agentStates # set pacman position agent_states.configuration.pos = (s[0], s[1]) #", "= len(agent_states) - 1 for i in range(1, num_ghosts +", "self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(),", "\".\": data.food[x][y] = bool(s_food[i]) i += 1 elif char ==", "s.data.food is the correct food matrix s.data.layout.food = s.data.food for", "+ nf + nc). 
**ACTIONS:** Move Pacman [up, down, left,", "= None s._capsuleEaten = None # time.sleep(0.1) # Sleep for", "DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state,", "2013, RLPy http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"]", "supports internal states.'\\ 'If you do pass a state parameter,", "= graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False", "timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile: filename of the", "locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) #", "# Puts the file in line stripped format layout_file_content =", "False def _tryToLoad(self, fullname): # used in getLayout function f", "self.layoutFile = layoutFile # Puts the file in line stripped", "updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten =", "x += 1 def _get_state(self): \"\"\" get the internal game", "= next_state_p # pacman performs action \"a\" in current state", "function. Accounts for scoring changes in terminal states. \"\"\" return", "to pass a state (s)'\\ 'to showDomain(); Pacman only supports", "= self.game_state if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self)", "a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p", "# pacman performs action \"a\" in current state object #", "tuple of form (reward, new state vector, terminal) \"\"\" if", "* 3 x = 0 y = 0 for char", "agent in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None", "implementation from the `BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the", "Accounts for scoring changes in terminal states. \"\"\" return self.game_state.data._lose", "# copies most recent state data = self.game_state.data agent_states =", "1]] * ( self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\")", "variables for each capsule indicating if it is still on", "<https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the domain see the original", "in getLayout function f = open(fullname) grid = [line.strip() for", "capsule indicating if it is still on the board or", "pacman position s[:2] = agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() #", "DummyGraphics(object): def initialize(self, *arg, **kwargs): pass def update(self, *arg, **kwargs):", "pacman * [3 * ng] the x and y coordinates", "in # the future return np.array([0]) # makes an array", "self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes", "a) # pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly for", "eaten.) 
If game should terminate, returns the proper indication to", "self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules)", "property(_get_state, _set_state) def showDomain(self, a, s=None): if s is not", "or self.game_state.data._win def _defaultSettings(self): self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost(", "Pacman domain to the given state. Internal states accounted for", "ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height -", "agent_states[i + 1].configuration.pos s[2 + i * 3 + 2]", "Puts the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile)", "# set pacman position agent_states.configuration.pos = (s[0], s[1]) # set", "set pacman position agent_states.configuration.pos = (s[0], s[1]) # set ghost", "which Pacman world you want self.layoutFile = layoutFile # Puts", "statespace_limits.append([1, self.layout.height - 2]) # adds ghost x, y locations", "possible actions pacman can perform at any given # state", "len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer =", "len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose(): break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i)", "a) next_state = next_state_p # pacman performs action \"a\" in", "1 return s state = property(_get_state, _set_state) def showDomain(self, a,", "self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0,", "actions pacman can perform at any given # state possibleActions", "randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) # keep track", "of possible actions pacman can perform at any given #", "noise # Specifies which Pacman world you want self.layoutFile =", "self.random_state.random_sample() < self.noise: # Random Move a = self.random_state.choice(self.possibleActions()) a", "agent_states = data.agentStates # set pacman position agent_states.configuration.pos = (s[0],", "# s.data.food is the correct food matrix s.data.layout.food = s.data.food", "in the state vector. statespace_limits = [] # adds pacman", "default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile: filename of the map file", "the internal game state used by the original pacman package.", "returns the proper indication to step function. Accounts for scoring", "The state vector has a series of dimensions: * [2]", "states.'\\ 'If you do pass a state parameter, ensure it", "Pacman(Domain): \"\"\" Pacman domain, which acts as a wrapper for", "In Pacman.py, attempted to pass a state (s)'\\ 'to showDomain();", "by ghost or out of time, and for success, all", "= None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes an", "nf + nc). **ACTIONS:** Move Pacman [up, down, left, right,", "de-comment time.sleep() in the showDomain() method. **REFERENCE:** This domain is", "i = 2 + num_ghosts * 3 x = 0", "you do pass a state parameter, ensure it is set", "agent_states[i].scaredTimer = part_s[2] # set food and capsules locations s_food", "+ 1): part_s = s[(3 * i) - 1:3 *" ]
[ "def get_working_directory(template): \"\"\"Return the collection which is the main working", "is the main working directory. template: Template which will be", "\"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100)", "default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return", "= zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the", "respective collection does not exist, it will be created before", "and volume. In ``template`` the placeholders $year and $volume will", "from zeit.cms.i18n import MessageFactory as _ import zope.interface import zope.schema", "zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"),", "main working directory. template: Template which will be filled with", "If the respective collection does not exist, it will be", "be filled with year and volume. In ``template`` the placeholders", "zeit.cms.i18n import MessageFactory as _ import zope.interface import zope.schema class", "which will be filled with year and volume. In ``template``", "_ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\"", "Example: 'online/$year/$volume/foo' If the respective collection does not exist, it", "title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which", "import MessageFactory as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface):", "max=54) def get_working_directory(template): \"\"\"Return the collection which is the main", "year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54)", "'online/$year/$volume/foo' If the respective collection does not exist, it will", "settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume =", "$year and $volume will be replaced. Example: 'online/$year/$volume/foo' If the", "the main working directory. template: Template which will be filled", "year and volume. In ``template`` the placeholders $year and $volume", "IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900,", "$volume will be replaced. Example: 'online/$year/$volume/foo' If the respective collection", "min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which is the", "not exist, it will be created before returning it. \"\"\"", "zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year =", "collection which is the main working directory. template: Template which", "zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection", "In ``template`` the placeholders $year and $volume will be replaced.", "with year and volume. In ``template`` the placeholders $year and", "``template`` the placeholders $year and $volume will be replaced. Example:", "volume. 
In ``template`` the placeholders $year and $volume will be", "max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template):", "import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year", "which is the main working directory. template: Template which will", "and $volume will be replaced. Example: 'online/$year/$volume/foo' If the respective", "import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int(", "filled with year and volume. In ``template`` the placeholders $year", "replaced. Example: 'online/$year/$volume/foo' If the respective collection does not exist,", "as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS", "collection does not exist, it will be created before returning", "does not exist, it will be created before returning it.", "get_working_directory(template): \"\"\"Return the collection which is the main working directory.", "Template which will be filled with year and volume. In", "\"\"\"Return the collection which is the main working directory. template:", "= zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default", "class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"),", "min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def", "CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume", "the placeholders $year and $volume will be replaced. Example: 'online/$year/$volume/foo'", "default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int(", "the collection which is the main working directory. template: Template", "the respective collection does not exist, it will be created", "template: Template which will be filled with year and volume.", "will be filled with year and volume. In ``template`` the", "zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default", "placeholders $year and $volume will be replaced. Example: 'online/$year/$volume/foo' If", "working directory. template: Template which will be filled with year", "directory. template: Template which will be filled with year and", "title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1,", "MessageFactory as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global", "be replaced. Example: 'online/$year/$volume/foo' If the respective collection does not", "will be replaced. Example: 'online/$year/$volume/foo' If the respective collection does", "volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which is" ]
[ "N, M = map(int, input().split()) for i in range(1, M", "j = (i - 2) // 2 print(M + 2", "range(1, M + 1): if i % 2 == 1:", "= (i - 1) // 2 print(1 + j, M", "j) else: j = (i - 2) // 2 print(M", "print(1 + j, M + 1 - j) else: j", "in range(1, M + 1): if i % 2 ==", "% 2 == 1: j = (i - 1) //", "1): if i % 2 == 1: j = (i", "<filename>abc/abc165/abc165e.py N, M = map(int, input().split()) for i in range(1,", "- 1) // 2 print(1 + j, M + 1", "j, M + 1 - j) else: j = (i", "1 - j) else: j = (i - 2) //", "2 print(M + 2 + j, 2 * M +", "= (i - 2) // 2 print(M + 2 +", "+ 1): if i % 2 == 1: j =", "1) // 2 print(1 + j, M + 1 -", "(i - 1) // 2 print(1 + j, M +", "if i % 2 == 1: j = (i -", "= map(int, input().split()) for i in range(1, M + 1):", "- j) else: j = (i - 2) // 2", "+ 1 - j) else: j = (i - 2)", "== 1: j = (i - 1) // 2 print(1", "else: j = (i - 2) // 2 print(M +", "// 2 print(1 + j, M + 1 - j)", "M + 1 - j) else: j = (i -", "M + 1): if i % 2 == 1: j", "// 2 print(M + 2 + j, 2 * M", "2 == 1: j = (i - 1) // 2", "map(int, input().split()) for i in range(1, M + 1): if", "input().split()) for i in range(1, M + 1): if i", "print(M + 2 + j, 2 * M + 1", "j = (i - 1) // 2 print(1 + j,", "- 2) // 2 print(M + 2 + j, 2", "2 print(1 + j, M + 1 - j) else:", "2 + j, 2 * M + 1 - j)", "+ j, M + 1 - j) else: j =", "1: j = (i - 1) // 2 print(1 +", "i in range(1, M + 1): if i % 2", "(i - 2) // 2 print(M + 2 + j,", "i % 2 == 1: j = (i - 1)", "for i in range(1, M + 1): if i %", "2) // 2 print(M + 2 + j, 2 *", "M = map(int, input().split()) for i in range(1, M +", "+ 2 + j, 2 * M + 1 -" ]
[ "API SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\",", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "fd: m = reg.match(line) if m: __version__ = m.group(1) break", "may obtain # a copy of the License at #", "author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\",", "-*- # Licensed under the Apache License, Version 2.0 (the", "agreed to in writing, software # distributed under the License", "= fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\",", "Unless required by applicable law or agreed to in writing,", "distributed under the License is distributed on an \"AS IS\"", "__version__ = '' with open('facebookbot/__about__.py', 'r') as fd: reg =", "Developers\", \"Programming Language :: Python :: 3\", \"Topic :: Software", "Apache Software License\", \"Intended Audience :: Developers\", \"Programming Language ::", "if m: __version__ = m.group(1) break def _requirements(): with open('requirements.txt',", "License, Version 2.0 (the \"License\"); you may # not use", "CONDITIONS OF ANY KIND, either express or implied. See the", "'r') as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\",", "obtain # a copy of the License at # #", "as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in", "\"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 -", "applicable law or agreed to in writing, software # distributed", "'r') as fd: return [name.strip() for name in fd.readlines()] with", "'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line", "coding: utf-8 -*- # Licensed under the Apache License, Version", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Version 2.0 (the \"License\"); you may # not use this", "open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for", "specific language governing permissions and limitations # under the License.", "line in fd: m = reg.match(line) if m: __version__ =", "setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging", "], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License", "License\", \"Intended Audience :: Developers\", \"Programming Language :: Python ::", "# not use this file except in compliance with the", "not use this file except in compliance with the License.", "OF ANY KIND, either express or implied. 
See the #", "long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\",", "license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development", ":: 5 - Production/Stable\", \"License :: OSI Approved :: Apache", "import setup from setuptools.command.test import test as TestCommand __version__ =", "packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5", "writing, software # distributed under the License is distributed on", "m = reg.match(line) if m: __version__ = m.group(1) break def", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "in writing, software # distributed under the License is distributed", "import sys from setuptools import setup from setuptools.command.test import test", "TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as fd: reg", "for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ],", "Status :: 5 - Production/Stable\", \"License :: OSI Approved ::", "in compliance with the License. You may obtain # a", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "License for the specific language governing permissions and limitations #", "re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line)", "the License. You may obtain # a copy of the", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "use this file except in compliance with the License. You", "You may obtain # a copy of the License at", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= reg.match(line) if m: __version__ = m.group(1) break def _requirements():", "utf-8 -*- # Licensed under the Apache License, Version 2.0", "name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API", "setuptools import setup from setuptools.command.test import test as TestCommand __version__", "= '' with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__", "for line in fd: m = reg.match(line) if m: __version__", "fd: return [name.strip() for name in fd.readlines()] with open('README.rst', 'r')", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache License", "install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License ::", "reg.match(line) if m: __version__ = m.group(1) break def _requirements(): with", "_requirements(): with open('requirements.txt', 'r') as fd: return [name.strip() for name", "# under the License. import re import sys from setuptools", "[\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line) if m:", "description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache License 2.0',", "sys from setuptools import setup from setuptools.command.test import test as", "either express or implied. 
See the # License for the", "limitations # under the License. import re import sys from", "# -*- coding: utf-8 -*- # Licensed under the Apache", "under the License is distributed on an \"AS IS\" BASIS,", "maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "fd.readlines()] with open('README.rst', 'r') as fd: long_description = fd.read() setup(", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "may # not use this file except in compliance with", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "return [name.strip() for name in fd.readlines()] with open('README.rst', 'r') as", "under the License. import re import sys from setuptools import", "for name in fd.readlines()] with open('README.rst', 'r') as fd: long_description", "\"Intended Audience :: Developers\", \"Programming Language :: Python :: 3\",", "fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd:", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "with the License. You may obtain # a copy of", "KIND, either express or implied. See the # License for", "# License for the specific language governing permissions and limitations", "- Production/Stable\", \"License :: OSI Approved :: Apache Software License\",", "License. import re import sys from setuptools import setup from", "Language :: Python :: 3\", \"Topic :: Software Development\" ]", "you may # not use this file except in compliance", "Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(),", "\"License\"); you may # not use this file except in", "break def _requirements(): with open('requirements.txt', 'r') as fd: return [name.strip()", "[name.strip() for name in fd.readlines()] with open('README.rst', 'r') as fd:", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "Messaging API SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[", "express or implied. See the # License for the specific", "this file except in compliance with the License. You may", ":: OSI Approved :: Apache Software License\", \"Intended Audience ::", "compliance with the License. 
You may obtain # a copy", "from setuptools.command.test import test as TestCommand __version__ = '' with", "\"License :: OSI Approved :: Apache Software License\", \"Intended Audience", ":: Developers\", \"Programming Language :: Python :: 3\", \"Topic ::", "the Apache License, Version 2.0 (the \"License\"); you may #", ":: Python :: 3\", \"Topic :: Software Development\" ] )", "Software License\", \"Intended Audience :: Developers\", \"Programming Language :: Python", "import test as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r')", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License :: OSI", "as fd: return [name.strip() for name in fd.readlines()] with open('README.rst',", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "5 - Production/Stable\", \"License :: OSI Approved :: Apache Software", "with open('README.rst', 'r') as fd: long_description = fd.read() setup( name=\"fbsdk\",", "SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\"", "See the # License for the specific language governing permissions", "reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "import re import sys from setuptools import setup from setuptools.command.test", "in fd: m = reg.match(line) if m: __version__ = m.group(1)", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "the # License for the specific language governing permissions and", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "# # Unless required by applicable law or agreed to", "= m.group(1) break def _requirements(): with open('requirements.txt', 'r') as fd:", "= re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m =", "with open('requirements.txt', 'r') as fd: return [name.strip() for name in", "open('README.rst', 'r') as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "file except in compliance with the License. You may obtain", "Approved :: Apache Software License\", \"Intended Audience :: Developers\", \"Programming", "m.group(1) break def _requirements(): with open('requirements.txt', 'r') as fd: return", "author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for", "the License. import re import sys from setuptools import setup", "as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as fd:", "as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\",", "permissions and limitations # under the License. import re import", "version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK", "for the specific language governing permissions and limitations # under", "law or agreed to in writing, software # distributed under", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "the specific language governing permissions and limitations # under the", "governing permissions and limitations # under the License. import re", "in fd.readlines()] with open('README.rst', 'r') as fd: long_description = fd.read()", "long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[", "under the Apache License, Version 2.0 (the \"License\"); you may", "\"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\",", "except in compliance with the License. You may obtain #", "2.0 (the \"License\"); you may # not use this file", "def _requirements(): with open('requirements.txt', 'r') as fd: return [name.strip() for", "implied. See the # License for the specific language governing", "\"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved", "re import sys from setuptools import setup from setuptools.command.test import", "from setuptools import setup from setuptools.command.test import test as TestCommand", "language governing permissions and limitations # under the License. import", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Licensed under", "setup from setuptools.command.test import test as TestCommand __version__ = ''", "License. You may obtain # a copy of the License", "OSI Approved :: Apache Software License\", \"Intended Audience :: Developers\",", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "ANY KIND, either express or implied. See the # License", "python # -*- coding: utf-8 -*- # Licensed under the", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "setuptools.command.test import test as TestCommand __version__ = '' with open('facebookbot/__about__.py',", "= [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line) if", "# Unless required by applicable law or agreed to in", "name in fd.readlines()] with open('README.rst', 'r') as fd: long_description =", "License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status", "2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status ::", "Audience :: Developers\", \"Programming Language :: Python :: 3\", \"Topic", "-*- coding: utf-8 -*- # Licensed under the Apache License,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "open('requirements.txt', 'r') as fd: return [name.strip() for name in fd.readlines()]", ":: Apache Software License\", \"Intended Audience :: Developers\", \"Programming Language", "\"Programming Language :: Python :: 3\", \"Topic :: Software Development\"", "to in writing, software # distributed under the License is", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "Production/Stable\", \"License :: OSI Approved :: Apache Software License\", \"Intended", "'' with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ =", "and limitations # under the License. 
import re import sys", "maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache", "<reponame>giggslam/python-messengerbot-sdk<filename>setup.py #!/usr/bin/env python # -*- coding: utf-8 -*- # Licensed", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]')", "__version__ = m.group(1) break def _requirements(): with open('requirements.txt', 'r') as", "m: __version__ = m.group(1) break def _requirements(): with open('requirements.txt', 'r')", "or agreed to in writing, software # distributed under the", "test as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as", "required by applicable law or agreed to in writing, software", "fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook", "or implied. See the # License for the specific language", "Apache License, Version 2.0 (the \"License\"); you may # not", "fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\"," ]
[ "2.0 (the \"License\"); # you may not use this file", "a decoder. Mask values selected in `[0, 1]`: - 1", "its model (such as downloading or saving, resizing the input", "else: raise ValueError(\"You have to specify either input_ids or inputs_embeds\")", "weights after the attention softmax, used to compute the weighted", "a [`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\" @add_start_docstrings( \"The", "used for classification tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*):", "Encoder for the second modality. It should take in a", "See `attentions` under returned tensors for more detail. output_hidden_states (`bool`,", "parameters of the model. Initializing with a config file does", "# coding=utf-8 # Copyright (c) Facebook, Inc. and its affiliates.", "seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token", "elements depending on the configuration (config) and inputs: **loss**: (*optional*,", "= self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not None:", "Sequence of hidden-states at the output of the last layer", "Model configuration class with all the parameters of the model.", "[`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of", "output of each layer + the output of the embeddings)", "sequence_length, sequence_length)`: Attentions weights after the attention softmax, used to", "Attentions weights after the attention softmax, used to compute the", "modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token to", "return_dict: output = (logits,) + outputs[2:] return ((loss,) + output)", "for tokens that are **not masked**, - 0 for tokens", "config.num_labels)` Classification (or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**:", "0 for tokens that are **masked**. output_attentions (`bool`, *optional*): Whether", "of shape `(batch_size, modal_sequence_length)`: Segment token indices to indicate different", "if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask", "corresponds to a *sentence A* token, - 1 corresponds to", "performance on various multimodal classification benchmark tasks. This model inherits", "ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions", "as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):", "return_dict=None, ): return_dict = return_dict if return_dict is not None", "self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None", "the head is **not masked**, - 0 indicates the head", "end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if", "License for the specific language governing permissions and # limitations", "None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, )", "configuration. 
transformer (:class: *~nn.Module*): A text transformer that is used", "input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape,", "@add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression head on", "encoder, embeddings): super().__init__() self.config = config self.encoder = encoder self.proj_embeddings", "(a linear layer on top of the pooled output) \"\"\",", "embeddings for the non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size,", "pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self,", "to indicate different portions of the non-text modality. The embeddings", "attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment", "Embedding. [CLS] Most commonly used for classification tasks. modal_end_tokens (`torch.LongTensor`", "permissions and # limitations under the License. \"\"\"PyTorch MMBT model.", "`hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*):", ") def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings =", "attentions tensors of all attention layers. See `attentions` under returned", "token, - 1 corresponds to a *sentence B* token. [What", "token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not", "input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if", "padding token indices of the encoder input. This mask is", "not to return the attentions tensors of all attention layers.", "modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\"", "embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings)", "= torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask],", "both input_ids and inputs_embeds at the same time\") elif input_ids", "(`bool`, *optional*): Whether or not to return the hidden states", "inputs_embeds is not None: raise ValueError(\"You cannot specify both input_ids", "of the self-attention modules. Mask values selected in `[0, 1]`:", "(`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality data. It", "usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with", "have to specify either input_ids or inputs_embeds\") device = input_ids.device", "attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask =", "= self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask,", "= config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings", "tokens in the position embeddings for the non-text modality. 
Selected", "`(batch_size,)`: Labels for computing the sequence classification/regression loss. Indices should", "self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not None: start_token_embeds", "attention on padding token indices. Mask values selected in `[0,", "shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at the", "output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions", "labels is not None: if self.num_labels == 1: # We", "OF ANY KIND, either express or implied. # See the", "from [`PreTrainedModel`]. Check the superclass documentation for the generic methods", "See the License for the specific language governing permissions and", "= model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\" def", ") class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`:", "not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions,", "to in writing, software # distributed under the License is", "pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**:", "outputs = model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\"", "runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config,", "or agreed to in writing, software # distributed under the", "= embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape, device=device)", "- 0 for tokens that are **masked**. [What are attention", "is not None: raise ValueError(\"You cannot specify both input_ids and", "if return_dict is not None else self.config.use_return_dict outputs = self.mmbt(", "config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None,", "encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder for the", "computing the sequence classification/regression loss. Indices should be in `[0,", "compliance with the License. # You may obtain a copy", "token_embeddings], dim=1) if end_token is not None: end_token_embeds = self.word_embeddings(end_token)", "*optional*): Sequence of hidden-states at the output of the last", "provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression if config.num_labels==1)", "`[0, 1]`: - 0 corresponds to a *sentence A* token,", "(`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead", "shape `(batch_size,)`, *optional*): Optional end token to be added to", "embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model", "1 for tokens that are **not masked**, - 0 for", "if input_ids is not None and inputs_embeds is not None:", "<gh_stars>1-10 # coding=utf-8 # Copyright (c) Facebook, Inc. 
and its", "self.classifier(pooled_output) loss = None if labels is not None: if", "= None if labels is not None: if self.num_labels ==", "Initializing with a config file does not load the weights", "not use this file except in compliance with the License.", "labels=labels) loss, logits = outputs[:2] ```\"\"\" def __init__(self, config, transformer,", "self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids,", "start token to be added to Other Modality Embedding. [CLS]", "you may not use this file except in compliance with", "Mask to avoid performing attention on the padding token indices", "(sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions,", "logits = outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__()", "methods the library implements for all its model (such as", "= self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape =", "shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each input", "Inc. team. # # Licensed under the Apache License, Version", "self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs =", "returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the output", "attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output", "else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids,", "`input_ids` you can choose to directly pass an embedded representation.", "nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings =", "shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "\"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes in an", "input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify either", "Indices are selected in `[0, 1]`: - 0 corresponds to", "+ encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def", "= torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask is", "be in `[0, ..., config.num_labels - 1]`. 
If `config.num_labels ==", "None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify", "transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None,", "be summed with the respective token embeddings for the non-text", "*optional*): Indices of positions of each input sequence tokens in", "example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args)", "tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not", "config.num_labels - 1]`. If `config.num_labels == 1` a regression loss", "of shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1) scores", "and inputs: **loss**: (*optional*, returned when `labels` is provided) `torch.FloatTensor`", "head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self,", "or not to return the attentions tensors of all attention", "in the vocabulary. It does not expect [CLS] token to", "to return the hidden states of all layers. See `hidden_states`", "= transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC)", "if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling(", "> 1` a classification loss is computed (Cross-Entropy). Returns: *Tuple*", "modal_sequence_length)`: Segment token indices to indicate different portions of the", "choose to directly pass an embedded representation. This is useful", "input. This mask is used in the cross-attention if the", "input sequence tokens in the vocabulary. It does not expect", "input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\" def __init__(self, config,", "used to compute the weighted average in the self-attention heads.", "It should have embeddings, encoder, and pooler attributes. encoder (:class:", "<NAME>, <NAME>. It's a supervised multimodal bitransformer model that fuses", "= encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings", "the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids)", "config ([`MMBTConfig`]): Model configuration class with all the parameters of", "not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to", "== 1` a regression loss is computed (Mean-Square loss), If", "classification loss is computed (Cross-Entropy). Returns: *Tuple* comprising various elements", "value @add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression head", "transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions is not", "inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, )", "fuses information from text and other image encoders, and obtain", "embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. [What", "limitations under the License. 
\"\"\"PyTorch MMBT model. \"\"\" import torch", "Segment token indices to indicate first and second portions of", "model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\" def __init__(self,", "the non-text modality. Selected in the range `[0, config.max_position_embeddings -", "config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None,", "This mask is used in the cross-attention if the model", "**not masked**, - 0 indicates the head is **masked**. inputs_embeds", "input_ids is not None and inputs_embeds is not None: raise", "loss is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending", "non-text modality. The embeddings from these tokens will be summed", "modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict if", "(*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token indices to", "self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None,", "mask is used in the cross-attention if the model is", "range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) modal_position_ids", "output of each layer plus the initial embedding outputs. **attentions**:", "embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in [Supervised", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None:", "inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1]", "torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask is None:", "related to general usage and behavior. Parameters: config ([`MMBTConfig`]): Model", "**masked**. output_attentions (`bool`, *optional*): Whether or not to return the", "encoders, and obtain state-of-the-art performance on various multimodal classification benchmark", "if self.num_labels == 1: # We are doing regression loss_fct", "if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`)", "masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token", "Indices of positions of each input sequence tokens in the", "return the hidden states of all layers. See `hidden_states` under", "`torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices to indicate", "(`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of", "= torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids =", "token to be added to Other Modality Embedding. [CLS] Most", "text transformer that is used by MMBT. It should have", "tokens that are **masked**. output_attentions (`bool`, *optional*): Whether or not", "file except in compliance with the License. # You may", "non-text modality. 
position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Indices", "model was proposed in [Supervised Multimodal Bitransformers for Classifying Images", "are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1))", "classification benchmark tasks. This model inherits from [`PreTrainedModel`]. Check the", "sequence tokens in the position embeddings for the non-text modality.", "(or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size,", "HuggingFace Inc. team. # # Licensed under the Apache License,", "*optional*): Whether or not to return the hidden states of", "general usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class", "shape `(batch_size, ***)`): The other modality data. It will be", "Whether or not to return the attentions tensors of all", "a regular PyTorch Module and refer to the PyTorch documentation", "width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input", "file does not load the weights associated with the model,", "output_attentions (`bool`, *optional*): Whether or not to return the attentions", "config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings", "encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device),", "(one for the output of each layer + the output", "`(batch_size,)`, *optional*): Optional end token to be added to Other", "`torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1)", "tensors for more detail. return_dict (`bool`, *optional*): Whether or not", "1 corresponds to a *sentence B* token. [What are token", "KIND, either express or implied. # See the License for", "dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings", "device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings =", "other modality embeddings. Indices can be obtained using [`BertTokenizer`]. See", "model (such as downloading or saving, resizing the input embeddings,", "is computed (Mean-Square loss), If `config.num_labels > 1` a classification", "token to be added as it's appended to the end", "list of `torch.FloatTensor` (one for each layer) of shape `(batch_size,", "License. \"\"\"PyTorch MMBT model. \"\"\" import torch from torch import", "and second portions of the inputs. Indices are selected in", "state-of-the-art performance on various multimodal classification benchmark tasks. This model", "using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are", "(the \"License\"); # you may not use this file except", "extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask =", "token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids", "the configuration. 
transformer (:class: *~nn.Module*): A text transformer that is", "modal_sequence_length)`, *optional*): Indices of positions of each input sequence tokens", "1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)`", "outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids,", "under returned tensors for more detail. return_dict (`bool`, *optional*): Whether", "modality. The embeddings from these tokens will be summed with", "return_dict if return_dict is not None else self.config.use_return_dict outputs =", "input sequence tokens in the position embeddings for the non-text", "not to return the hidden states of all layers. See", "superclass documentation for the generic methods the library implements for", "# # Unless required by applicable law or agreed to", "of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module):", "linear layer on top of the pooled output) \"\"\", MMBT_START_DOCSTRING,", "- 0 for tokens that are **masked**. output_attentions (`bool`, *optional*):", "Mask to nullify selected heads of the self-attention modules. Mask", "for all matter related to general usage and behavior. Parameters:", "modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None,", "self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings", "is not None: input_txt_shape = input_ids.size() elif inputs_embeds is not", "output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*)", "second portions of the inputs. Indices are selected in `[0,", "inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples::", "MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of", "- 0 corresponds to a *sentence A* token, - 1", "for the second modality. It should take in a batch", "be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size,", "encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder(", "`(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads", "implied. 
# See the License for the specific language governing", "self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout", "Model with a sequence classification/regression head on top (a linear", "[CLS] token to be added as it's appended to the", "a batch of modal inputs and return k, n dimension", "**hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for", "labels=None, return_dict=None, ): return_dict = return_dict if return_dict is not", "language governing permissions and # limitations under the License. \"\"\"PyTorch", "by MMBT. It should have embeddings, encoder, and pooler attributes.", "Labels for computing the sequence classification/regression loss. Indices should be", "= self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids,", "import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module):", "a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask", "token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices", "to the PyTorch documentation for all matter related to general", "time\") elif input_ids is not None: input_txt_shape = input_ids.size() elif", "output = (logits,) + outputs[2:] return ((loss,) + output) if", "are **masked**. output_attentions (`bool`, *optional*): Whether or not to return", "the inputs. Indices are selected in `[0, 1]`: - 0", "the weighted average in the self-attention heads. Examples: ```python #", "by <NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer", "output) if loss is not None else output return SequenceClassifierOutput(", "shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each input", "and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all", "self.config.use_return_dict if input_ids is not None and inputs_embeds is not", "of input sequence tokens in the vocabulary. It does not", "...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class", "Unless required by applicable law or agreed to in writing,", "= torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask,", "token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of", "a config file does not load the weights associated with", "classification/regression head on top (a linear layer on top of", "end of other modality embeddings. Indices can be obtained using", "the specific language governing permissions and # limitations under the", "plus the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`)", "the encoder input. This mask is used in the cross-attention", "self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT", "of the non-text modality. 
The embeddings from these tokens will", "= output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states", "loss), If `config.num_labels > 1` a classification loss is computed", "from these tokens will be summed with the respective token", "from ...modeling_utils import ModuleUtilsMixin from ...utils import logging logger =", "only the configuration. transformer (:class: *~nn.Module*): A text transformer that", "0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape", "(`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to", "[torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device)", "Mask to avoid performing attention on padding token indices. Mask", "to be added as it's appended to the end of", "IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of", "ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__() self.config = config", "the attention softmax, used to compute the weighted average in", "*optional*): Whether or not to return the attentions tensors of", "the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list", "at the output of each layer plus the initial embedding", "for classification tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional", "[What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape", "the respective token embeddings for the non-text modality. position_ids (`torch.LongTensor`", "model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape", "The other modality data. It will be the shape that", "tokens that are **not masked**, - 0 for tokens that", "= position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids = torch.zeros(", "def __init__(self, config, transformer, encoder): super().__init__() self.config = config self.transformer", "IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token", "else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask,", "import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from", "of each layer plus the initial embedding outputs. **attentions**: (*optional*,", "in a batch of modal inputs and return k, n", "layers. See `attentions` under returned tensors for more detail. output_hidden_states", "- 1 corresponds to a *sentence B* token. [What are", "= encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output,", "and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. 
It's a supervised", "MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`):", "None: raise ValueError(\"You cannot specify both input_ids and inputs_embeds at", "MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions is", "end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids = torch.arange(seq_length, dtype=torch.long,", "self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None,", "<NAME>. It's a supervised multimodal bitransformer model that fuses information", "`output_attentions=True`) list of `torch.FloatTensor` (one for each layer) of shape", "txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output =", "and its affiliates. # Copyright (c) HuggingFace Inc. team. #", "loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output =", "input embeddings, pruning heads etc.) This model is also a", "are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional", "= MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size,", "= config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings)", "tokens in the position embeddings. Selected in the range `[0,", "shape `(1,)`: Classification (or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor`", "should take in a batch of modal inputs and return", "for more detail. return_dict (`bool`, *optional*): Whether or not to", "cross-attention if the model is configured as a decoder. encoder_attention_mask", "benchmark tasks. This model inherits from [`PreTrainedModel`]. Check the superclass", "and inputs_embeds at the same time\") elif input_ids is not", "`torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to avoid performing attention", "self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return", "mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if", "torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape,", "commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask", "**logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression if", "nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings =", "on top (a linear layer on top of the pooled", "Classification (or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*,", "on the padding token indices of the encoder input. This", "You may obtain a copy of the License at #", "is not None else self.config.output_hidden_states ) return_dict = return_dict if", "modality embeddings. Indices can be obtained using [`BertTokenizer`]. 
See [`PreTrainedTokenizer.encode`]", "ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal,", "This is useful if you want more control over how", "on top of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, )", "pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if", "other modality data. It will be the shape that the", "positions of each input sequence tokens in the position embeddings.", "associated with the model, only the configuration. transformer (:class: *~nn.Module*):", "return_dict = return_dict if return_dict is not None else self.config.use_return_dict", "encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs =", "outputs[2:] return ((loss,) + output) if loss is not None", "(`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token to be", "return_dict is not None else self.config.use_return_dict if input_ids is not", "the position embeddings for the non-text modality. Selected in the", "each layer + the output of the embeddings) of shape", "not None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens,", "loss. Indices should be in `[0, ..., config.num_labels - 1]`.", "[CLS] Most commonly used for classification tasks. modal_end_tokens (`torch.LongTensor` of", "modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token to", "tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token", "device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None: encoder_attention_mask", "bitransformer model that fuses information from text and other image", "Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's", "position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output", "== 1: # We are doing regression loss_fct = MSELoss()", "on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config,", "modules. Mask values selected in `[0, 1]`: - 1 indicates", "None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape,", "..., config.num_labels - 1]`. 
If `config.num_labels == 1` a regression", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "+ outputs[2:] return ((loss,) + output) if loss is not", "return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\"", "encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For", "sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return", "[What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`,", "performing attention on the padding token indices of the encoder", "embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else:", "seq_length) if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length),", "embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model", "`(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you", "with the respective token embeddings for the non-text modality. position_ids", "in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1`", "set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with", "Selected in the range `[0, config.max_position_embeddings - 1]`. [What are", "self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)", "detail. output_hidden_states (`bool`, *optional*): Whether or not to return the", "you want more control over how to convert `input_ids` indices", "than the model's internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt =", "= torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids,", "for computing the sequence classification/regression loss. Indices should be in", "License. # You may obtain a copy of the License", "matrix. 
encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence", ") sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict:", "token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not None:", "for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions", "MMBT Model with a sequence classification/regression head on top (a", "of positions of each input sequence tokens in the position", "...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput", "r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing", "= (logits,) + outputs[2:] return ((loss,) + output) if loss", "modal inputs and return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING", "super().__init__() self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size,", "of shape `(batch_size,)`, *optional*): Optional start token to be added", "a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch", "not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens,", "of the encoder. Used in the cross-attention if the model", "encoder input. This mask is used in the cross-attention if", "loss = None if labels is not None: if self.num_labels", "to indicate first and second portions of the inputs. Indices", "the second modality. It should take in a batch of", "of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at", "# Copyright (c) HuggingFace Inc. team. # # Licensed under", "input_ids.size() elif inputs_embeds is not None: input_txt_shape = inputs_embeds.size()[:-1] else:", "We are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1),", "self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size)", "dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None: encoder_attention_mask =", "*optional*): Optional end token to be added to Other Modality", "If `config.num_labels > 1` a classification loss is computed (Cross-Entropy).", "`(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each input sequence", "1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None:", "can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for", "sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels", "in `[0, 1]`: - 1 indicates the head is **not", "if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings", "= input_ids.device if input_ids is not None else inputs_embeds.device modal_embeddings", "more detail. return_dict (`bool`, *optional*): Whether or not to return", "of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at", "to nullify selected heads of the self-attention modules. 
Mask values", "token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids", "`(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1) scores (before SoftMax).", "proposed in [Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt)", "sequence tokens in the vocabulary. It does not expect [CLS]", "self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if", "head on top (a linear layer on top of the", "num_heads)`, *optional*): Mask to nullify selected heads of the self-attention", "is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device )", "selected in `[0, 1]`: - 0 corresponds to a *sentence", "various elements depending on the configuration (config) and inputs: **loss**:", "regular PyTorch Module and refer to the PyTorch documentation for", "masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor`", "not None: if self.num_labels == 1: # We are doing", "= CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict:", "dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds )", "in the cross-attention if the model is configured as a", "MMBT Model outputting raw hidden-states without any specific head on", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is", "purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model", "txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask is None: attention_mask", "output of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states", "transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer,", "sequence_length, hidden_size)`: Hidden-states of the model at the output of", "to avoid performing attention on the padding token indices of", "required by applicable law or agreed to in writing, software", "the last layer of the encoder. Used in the cross-attention", "else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask", "seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids)", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "Indices should be in `[0, ..., config.num_labels - 1]`. 
If", "((loss,) + output) if loss is not None else output", "= MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions", "the cross-attention if the model is configured as a decoder.", "= MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss,", "shape `(batch_size, modal_sequence_length)`: Segment token indices to indicate different portions", "of shape `(1,)`: Classification (or regression if config.num_labels==1) loss. **logits**:", "modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict,", "agreed to in writing, software # distributed under the License", "encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of", "token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output =", "heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.", "of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each", "distributed under the License is distributed on an \"AS IS\"", "+ the output of the embeddings) of shape `(batch_size, sequence_length,", "return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in", "token indices to indicate first and second portions of the", "torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1", "= self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings", "of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing", "of shape `(batch_size, ***)`): The other modality data. It will", "= loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,)", "all matter related to general usage and behavior. Parameters: config", "It does not expect [CLS] token to be added as", "second modality. It should take in a batch of modal", "not expect [CLS] token to be added as it's appended", "the sequence classification/regression loss. Indices should be in `[0, ...,", "output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict", "(c) HuggingFace Inc. team. # # Licensed under the Apache", "of a plain tuple. \"\"\" @add_start_docstrings( \"The bare MMBT Model", "or not to return the hidden states of all layers.", "shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected", "transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer,", ") extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask", "runnable. 
transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config,", "the encoder for that type expects. e.g. With an Image", "= outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels", "self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings +", "from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward,", "return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args:", "(*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to avoid performing", "does not load the weights associated with the model, only", "<NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer model", "decoder. Mask values selected in `[0, 1]`: - 1 for", "inputs and return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING =", "*sentence A* token, - 1 corresponds to a *sentence B*", "logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal", ") class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__()", "position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token", "encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask =", "(c) Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace", "= logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings", "to Other Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*)", "on the configuration (config) and inputs: **loss**: (*optional*, returned when", "sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can", "decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to", "r\"\"\" Returns: Examples:: # For example purposes. Not runnable. transformer", "Examples:: # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased')", "input_ids and inputs_embeds at the same time\") elif input_ids is", "the non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):", "= self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:]", "loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) +", "```python # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased')", "`(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights after the attention softmax,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "is **not masked**, - 0 indicates the head is **masked**.", "0 corresponds to a *sentence A* token, - 1 corresponds", "the License is distributed on an \"AS IS\" BASIS, #", "useful if you want more control over how to convert", "sequence_length)`: Attentions weights after the attention softmax, used to compute", "model's internal embedding lookup matrix. 
encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size,", "MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for", "for the non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`,", "= torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids)", "if not return_dict: output = (logits,) + outputs[2:] return ((loss,)", "This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it", "values selected in `[0, 1]`: - 1 for tokens that", "internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length,", "is not None: if self.num_labels == 1: # We are", "various multimodal classification benchmark tasks. This model inherits from [`PreTrainedModel`].", "not None: input_txt_shape = input_ids.size() elif inputs_embeds is not None:", "modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None,", "of shape `(batch_size,)`: Labels for computing the sequence classification/regression loss.", "device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1", "to be added to Other Modality Embedding. [CLS] Most commonly", "not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output,", "def __init__(self, config, encoder, embeddings): super().__init__() self.config = config self.encoder", "law or agreed to in writing, software # distributed under", "(logits,) + outputs[2:] return ((loss,) + output) if loss is", "for more detail. output_hidden_states (`bool`, *optional*): Whether or not to", "details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`,", "was proposed in [Supervised Multimodal Bitransformers for Classifying Images and", "MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape", "each input sequence tokens in the position embeddings. Selected in", "are selected in `[0, 1]`: - 0 corresponds to a", "```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels", "that fuses information from text and other image encoders, and", "`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token indices to indicate", "__init__(self, config, transformer, encoder): super().__init__() self.config = config self.transformer =", "regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when", "the model is configured as a decoder. Mask values selected", "self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if labels is", "embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__() self.config = config", "when `output_attentions=True`) list of `torch.FloatTensor` (one for each layer) of", "outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one", "and # limitations under the License. \"\"\"PyTorch MMBT model. 
\"\"\"", "scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of", "may obtain a copy of the License at # #", "and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor`", "embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder for", "output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states =", "respective token embeddings for the non-text modality. position_ids (`torch.LongTensor` of", "may not use this file except in compliance with the", "`[0, 1]`: - 1 for tokens that are **not masked**,", "return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`]", "= token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings", "with the model, only the configuration. transformer (:class: *~nn.Module*): A", "over how to convert `input_ids` indices into associated vectors than", "= torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long),", "for tokens that are **masked**. output_attentions (`bool`, *optional*): Whether or", "__init__(self, config, encoder, embeddings): super().__init__() self.config = config self.encoder =", "this file except in compliance with the License. # You", "encoder. Used in the cross-attention if the model is configured", "encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states,", "+= 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is", "the output of each layer + the output of the", "returned when `output_attentions=True`) list of `torch.FloatTensor` (one for each layer)", "self-attention heads. Examples: ```python # For example purposes. Not runnable.", "non-text modality. Selected in the range `[0, config.max_position_embeddings - 1]`.", "# # Licensed under the Apache License, Version 2.0 (the", "returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>.", "For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder =", "MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder):", "`config.num_labels > 1` a classification loss is computed (Cross-Entropy). Returns:", "(such as downloading or saving, resizing the input embeddings, pruning", "return ((loss,) + output) if loss is not None else", "computed (Mean-Square loss), If `config.num_labels > 1` a classification loss", "shape would be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of", "pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) +", "team. # # Licensed under the Apache License, Version 2.0", "modality. 
It should take in a batch of modal inputs", "or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of", "encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward(", "[SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size,", "= ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states", "embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING =", "modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions", "shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the", "transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def", "position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids =", "inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits", "of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify", "1]`: - 0 corresponds to a *sentence A* token, -", "of shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights after the", "attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None,", "height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of", "of the encoder input. This mask is used in the", "in [Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by", "or implied. # See the License for the specific language", "input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length =", "resizing the input embeddings, pruning heads etc.) This model is", "channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices", "documentation for all matter related to general usage and behavior.", "portions of the non-text modality. The embeddings from these tokens", "the model, only the configuration. transformer (:class: *~nn.Module*): A text", "- 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of", "input sequence tokens in the position embeddings. Selected in the", "Segment token indices to indicate different portions of the non-text", "position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None:", "the PyTorch documentation for all matter related to general usage", "Optionally, instead of passing `input_ids` you can choose to directly", "model at the output of each layer plus the initial", "# We are doing regression loss_fct = MSELoss() loss =", "input_ids is not None: input_txt_shape = input_ids.size() elif inputs_embeds is", "not return_dict: output = (logits,) + outputs[2:] return ((loss,) +", "corresponds to a *sentence B* token. 
[What are token type", "r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other", "is not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids,", "*Tuple* comprising various elements depending on the configuration (config) and", "token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape =", "IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token", "else self.config.use_return_dict if input_ids is not None and inputs_embeds is", "is used by MMBT. It should have embeddings, encoder, and", "want more control over how to convert `input_ids` indices into", "is not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states,", "a supervised multimodal bitransformer model that fuses information from text", "transformer (:class: *~nn.Module*): A text transformer that is used by", "encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output)", "at the output of the last layer of the encoder.", "encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions", "encoder for that type expects. e.g. With an Image Encoder,", "saving, resizing the input embeddings, pruning heads etc.) This model", "= r\"\"\" MMBT model was proposed in [Supervised Multimodal Bitransformers", "more detail. output_hidden_states (`bool`, *optional*): Whether or not to return", "and pooler attributes. encoder (:class: *~nn.Module*): Encoder for the second", "Optional end token to be added to Other Modality Embedding.", "(`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states", "Examples: ```python # For example purposes. Not runnable. transformer =", "selected in `[0, 1]`: - 1 for tokens that are", "be added to Other Modality Embedding. [CLS] Most commonly used", "`input_ids` indices into associated vectors than the model's internal embedding", "be the shape that the encoder for that type expects.", "does not expect [CLS] token to be added as it's", "passing `input_ids` you can choose to directly pass an embedded", "supervised multimodal bitransformer model that fuses information from text and", "Module and refer to the PyTorch documentation for all matter", "weighted average in the self-attention heads. Examples: ```python # For", "end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1", "refer to the PyTorch documentation for all matter related to", "= \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes in", "shape `(batch_size, sequence_length)`: Mask to avoid performing attention on padding", "all the parameters of the model. Initializing with a config", "PyTorch Module and refer to the PyTorch documentation for all", "instead of a plain tuple. \"\"\" @add_start_docstrings( \"The bare MMBT", "is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. 
# coding=utf-8
# Copyright (c) Facebook, Inc. and its affiliates.
# Copyright (c) HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch MMBT model. """


import torch
from torch import nn
from torch.nn import CrossEntropyLoss, MSELoss

from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings
from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput
from ...modeling_utils import ModuleUtilsMixin
from ...utils import logging


logger = logging.get_logger(__name__)

_CONFIG_FOR_DOC = "MMBTConfig"


class ModalEmbeddings(nn.Module):
    """Generic Modal Embeddings which takes in an encoder, and a transformer embedding."""

    def __init__(self, config, encoder, embeddings):
        super().__init__()
        self.config = config
        self.encoder = encoder
        self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size)
        self.position_embeddings = embeddings.position_embeddings
        self.token_type_embeddings = embeddings.token_type_embeddings
        self.word_embeddings = embeddings.word_embeddings
        self.LayerNorm = embeddings.LayerNorm
        self.dropout = nn.Dropout(p=config.hidden_dropout_prob)

    def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None):
        token_embeddings = self.proj_embeddings(self.encoder(input_modal))
        seq_length = token_embeddings.size(1)

        if start_token is not None:
            start_token_embeds = self.word_embeddings(start_token)
            seq_length += 1
            token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1)

        if end_token is not None:
            end_token_embeds = self.word_embeddings(end_token)
            seq_length += 1
            token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1)

        if position_ids is None:
            position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device)
            position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length)

        if token_type_ids is None:
            token_type_ids = torch.zeros(
                (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device
            )

        position_embeddings = self.position_embeddings(position_ids)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)
        embeddings = token_embeddings + position_embeddings + token_type_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings
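# --- Illustrative sketch (not part of the original module) -------------------
# A quick shape check for ModalEmbeddings, assuming a dummy encoder whose
# output is already a (batch, k, modal_hidden_size) feature tensor and a
# BERT-style embedding table. `DummyEncoder`, the SimpleNamespace config and
# all tensor sizes are illustrative assumptions, not library defaults.
def _modal_embeddings_shape_sketch():
    from types import SimpleNamespace

    from transformers import BertConfig, BertModel

    class DummyEncoder(nn.Module):
        def forward(self, features):
            # Pretend the modality was already encoded upstream.
            return features  # (batch, k, modal_hidden_size)

    bert = BertModel(BertConfig())
    config = SimpleNamespace(
        modal_hidden_size=2048,
        hidden_size=bert.config.hidden_size,
        hidden_dropout_prob=bert.config.hidden_dropout_prob,
    )
    modal_embeddings = ModalEmbeddings(config, DummyEncoder(), bert.embeddings)

    features = torch.randn(2, 3, 2048)  # two samples, three modal "tokens"
    out = modal_embeddings(features)
    print(out.shape)  # (2, 3, hidden_size): one projected embedding per modal token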
MMBT_START_DOCSTRING = r"""
    MMBT model was proposed in [Supervised Multimodal Bitransformers for Classifying Images and
    Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal
    bitransformer model that fuses information from text and other image encoders, and obtain state-of-the-art
    performance on various multimodal classification benchmark tasks.

    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its model (such as downloading or saving, resizing the input embeddings, pruning heads
    etc.)

    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
    and behavior.

    Parameters:
        config ([`MMBTConfig`]): Model configuration class with all the parameters of the model. Initializing with a
            config file does not load the weights associated with the model, only the configuration.
        transformer (:class: *~nn.Module*): A text transformer that is used by MMBT. It should have embeddings,
            encoder, and pooler attributes.
        encoder (:class: *~nn.Module*): Encoder for the second modality. It should take in a batch of modal inputs
            and return k, n dimension embeddings.
"""

MMBT_INPUTS_DOCSTRING = r"""
    Args:
        input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`):
            The other modality data. It will be the shape that the encoder for that type expects. e.g. With an Image
            Encoder, the shape would be (batch_size, channels, height, width)
        input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):
            Indices of input sequence tokens in the vocabulary. It does not expect [CLS] token to be added as it's
            appended to the end of other modality embeddings. Indices can be obtained using [`BertTokenizer`]. See
            [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details.

            [What are input IDs?](../glossary#input-ids)
        modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Optional start token to be added to Other Modality Embedding. [CLS] Most commonly used for classification
            tasks.
        modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Optional end token to be added to Other Modality Embedding. [SEP] Most commonly used.
        attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`:
            Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.

            [What are attention masks?](../glossary#attention-mask)
        token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`:
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0, 1]`:

            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.

            [What are token type IDs?](../glossary#token-type-ids)
        modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`:
            Segment token indices to indicate different portions of the non-text modality. The embeddings from these
            tokens will be summed with the respective token embeddings for the non-text modality.
        position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range
            `[0, config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings for the non-text modality.
            Selected in the range `[0, config.max_position_embeddings - 1]`.

            [What are position IDs?](../glossary#position-ids)
        head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
            Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:

            - 1 indicates the head is **not masked**,
            - 0 indicates the head is **masked**.
        inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*):
            Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation.
            This is useful if you want more control over how to convert `input_ids` indices into associated vectors
            than the model's internal embedding lookup matrix.
        encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:

            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
        output_attentions (`bool`, *optional*):
            Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
            tensors for more detail.
        output_hidden_states (`bool`, *optional*):
            Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
            more detail.
        return_dict (`bool`, *optional*):
            Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.
"""
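# --- Illustrative sketch (not part of the original module) -------------------
# How the documented inputs are commonly assembled, assuming a BERT-style
# tokenizer and an image modality. Per the docstring above, `input_ids` is not
# expected to carry a [CLS] token: the [CLS] id is passed as `modal_start_tokens`
# and the [SEP] id as `modal_end_tokens`, so they wrap the modal embeddings.
# `tokenizer`, `texts`, and `images` are caller-supplied; only the standard
# tokenizer call is assumed here, nothing MMBT-specific.
def _prepare_mmbt_inputs_sketch(tokenizer, texts, images):
    encoded = tokenizer(texts, add_special_tokens=False, padding=True, return_tensors="pt")
    batch_size = encoded["input_ids"].size(0)
    return {
        "input_modal": images,  # e.g. (batch_size, channels, height, width) for an image encoder
        "input_ids": encoded["input_ids"],
        "attention_mask": encoded["attention_mask"],  # text-only; ones for modal positions are added internally
        "modal_start_tokens": torch.full((batch_size,), tokenizer.cls_token_id, dtype=torch.long),
        "modal_end_tokens": torch.full((batch_size,), tokenizer.sep_token_id, dtype=torch.long),
    }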
\"\"\"PyTorch MMBT model. \"\"\" import", "MMBT model was proposed in [Supervised Multimodal Bitransformers for Classifying", "average in the self-attention heads. Examples: ```python # For example", "nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None,", "loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression", "= config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob)", "of the inputs. Indices are selected in `[0, 1]`: -", "A* token, - 1 corresponds to a *sentence B* token.", "to a *sentence A* token, - 1 corresponds to a", "should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels", "takes in an encoder, and a transformer embedding.\"\"\" def __init__(self,", "if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length +=", "Other Modality Embedding. [CLS] Most commonly used for classification tasks.", "layer plus the initial embedding outputs. **attentions**: (*optional*, returned when", "the encoder. Used in the cross-attention if the model is", "else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not", "None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is", "nn from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings,", "self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask,", "raise ValueError(\"You cannot specify both input_ids and inputs_embeds at the", "if loss is not None else output return SequenceClassifierOutput( loss=loss,", "token embeddings for the non-text modality. position_ids (`torch.LongTensor` of shape", "the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of", "in `[0, 1]`: - 0 corresponds to a *sentence A*", "tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*)", "torch import nn from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils", "sequence_length)`): Indices of input sequence tokens in the vocabulary. It", "forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length", "\"\"\"PyTorch MMBT model. \"\"\" import torch from torch import nn", "token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING", "positions of each input sequence tokens in the position embeddings", "the attentions tensors of all attention layers. See `attentions` under", "shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights after the attention", "config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings =", "the same time\") elif input_ids is not None: input_txt_shape =", "that is used by MMBT. 
It should have embeddings, encoder,", "return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states,", "`(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on the", "would be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape", "weights associated with the model, only the configuration. transformer (:class:", "image encoders, and obtain state-of-the-art performance on various multimodal classification", "`torch.LongTensor` of shape `(batch_size,)`: Labels for computing the sequence classification/regression", "inputs_embeds is not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You", "if end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length +=", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config,", "of each input sequence tokens in the position embeddings. Selected", "input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "Returns: Examples:: # For example purposes. Not runnable. transformer =", "input_ids or inputs_embeds\") device = input_ids.device if input_ids is not", "heads. Examples: ```python # For example purposes. Not runnable. transformer", "class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes in an encoder,", "returned tensors for more detail. return_dict (`bool`, *optional*): Whether or", "output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if", "See `hidden_states` under returned tensors for more detail. return_dict (`bool`,", "type expects. e.g. 
With an Image Encoder, the shape would", "position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1),", "token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`:", "*optional*): Mask to nullify selected heads of the self-attention modules.", "loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not", "is provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression if", "num_heads, sequence_length, sequence_length)`: Attentions weights after the attention softmax, used", "= self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict,", "device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 )", "self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings =", "`labels` is provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression", "config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING)", "self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout =", "device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "layer of the encoder. Used in the cross-attention if the", "= self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was", "configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`,", "Check the superclass documentation for the generic methods the library", "\"\"\" MMBT Model with a sequence classification/regression head on top", "self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT", "library implements for all its model (such as downloading or", "a classification loss is computed (Cross-Entropy). 
Returns: *Tuple* comprising various", "are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*):", "*optional*): Mask to avoid performing attention on the padding token", "position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return", "specific language governing permissions and # limitations under the License.", "1]`: - 1 for tokens that are **not masked**, -", "(*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape `(1,)`:", "transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def", "model, only the configuration. transformer (:class: *~nn.Module*): A text transformer", "and other image encoders, and obtain state-of-the-art performance on various", "dim=1) if position_ids is None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device)", "( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states )", "sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of", "token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length", "MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits", "dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)", "purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt", "torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings", "behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all the", "= nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings", "[torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None:", "start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids", "# you may not use this file except in compliance", "etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use", "states of all layers. See `hidden_states` under returned tensors for", "(:class: *~nn.Module*): A text transformer that is used by MMBT.", "**not masked**, - 0 for tokens that are **masked**. output_attentions", "the output of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`:", "the input embeddings, pruning heads etc.) 
This model is also", "documentation for the generic methods the library implements for all", "is None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0),", "Used in the cross-attention if the model is configured as", "token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is", "not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.", "if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask", "= embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm =", "Indices of input sequence tokens in the vocabulary. It does", "compute the weighted average in the self-attention heads. Examples: ```python", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "that type expects. e.g. With an Image Encoder, the shape", "the model's internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape", "token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids,", "Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>,", "(batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):", "position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ):", "`output_hidden_states=True`) list of `torch.FloatTensor` (one for the output of each", "return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not", "representation. This is useful if you want more control over", "self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if", "under the Apache License, Version 2.0 (the \"License\"); # you", "embeddings): super().__init__() self.config = config self.encoder = encoder self.proj_embeddings =", "device = input_ids.device if input_ids is not None else inputs_embeds.device", "with all the parameters of the model. Initializing with a", "for the generic methods the library implements for all its", "Modality Embedding. [CLS] Most commonly used for classification tasks. modal_end_tokens", "Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace Inc.", "Model outputting raw hidden-states without any specific head on top.\",", "will be the shape that the encoder for that type", "the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length,", "**loss**: (*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape", "(`bool`, *optional*): Whether or not to return the attentions tensors", "***)`): The other modality data. It will be the shape", "input_ids.device if input_ids is not None else inputs_embeds.device modal_embeddings =", "to avoid performing attention on padding token indices. 
Mask values", "end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)],", "pass an embedded representation. This is useful if you want", "can choose to directly pass an embedded representation. This is", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "*sentence B* token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*)", "cross-attention if the model is configured as a decoder. Mask", "add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import", "dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of", "= embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None,", "masked**, - 0 for tokens that are **masked**. output_attentions (`bool`,", "embeddings, pruning heads etc.) This model is also a PyTorch", "modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1]", "under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether", "Returns: *Tuple* comprising various elements depending on the configuration (config)", "inputs. Indices are selected in `[0, 1]`: - 0 corresponds", "an embedded representation. This is useful if you want more", "not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states", "encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None,", "avoid performing attention on the padding token indices of the", "an Image Encoder, the shape would be (batch_size, channels, height,", "how to convert `input_ids` indices into associated vectors than the", "the output of the last layer of the encoder. Used", "specify either input_ids or inputs_embeds\") device = input_ids.device if input_ids", "encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2]", "of each input sequence tokens in the position embeddings for", "transformer that is used by MMBT. It should have embeddings,", "*~nn.Module*): A text transformer that is used by MMBT. It", ") pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output)", "logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which", "padding token indices. Mask values selected in `[0, 1]`: -", "self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self,", "the position embeddings. Selected in the range `[0, config.max_position_embeddings -", "CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs", "it's appended to the end of other modality embeddings. 
Indices", "import torch from torch import nn from torch.nn import CrossEntropyLoss,", "None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens,", "Mask values selected in `[0, 1]`: - 1 indicates the", "1: # We are doing regression loss_fct = MSELoss() loss", "embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can choose", "model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, )", "output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For example", "[Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>,", "MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__() self.config =", "ValueError(\"You cannot specify both input_ids and inputs_embeds at the same", "and obtain state-of-the-art performance on various multimodal classification benchmark tasks.", "encoder) \"\"\" output_attentions = output_attentions if output_attentions is not None", "which takes in an encoder, and a transformer embedding.\"\"\" def", "output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict", "the configuration (config) and inputs: **loss**: (*optional*, returned when `labels`", "the License. \"\"\"PyTorch MMBT model. \"\"\" import torch from torch", "= token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token)", "embeddings. Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and", "of modal inputs and return k, n dimension embeddings. \"\"\"", "different portions of the non-text modality. The embeddings from these", "Apache License, Version 2.0 (the \"License\"); # you may not", "or inputs_embeds\") device = input_ids.device if input_ids is not None", "encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self):", "either express or implied. # See the License for the", "hidden-states without any specific head on top.\", MMBT_START_DOCSTRING, ) class", "Encoder, the shape would be (batch_size, channels, height, width) input_ids", "if you want more control over how to convert `input_ids`", "indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size,", "the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the", "`torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,", "without any specific head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module,", "model. 
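# --- Illustrative sketch (not part of the original module) -------------------
# A minimal forward pass through MMBTModel, assuming a toy image encoder that
# maps each image to a single 2048-d feature vector. `ToyImageEncoder`, the
# SimpleNamespace config, and the tensor sizes are illustrative assumptions,
# not the library's reference setup.
def _mmbt_model_forward_sketch():
    from types import SimpleNamespace

    from transformers import BertConfig, BertModel

    class ToyImageEncoder(nn.Module):
        def __init__(self):
            super().__init__()
            self.pool = nn.AdaptiveAvgPool2d(1)
            self.proj = nn.Linear(3, 2048)

        def forward(self, images):
            pooled = self.pool(images).flatten(1)   # (batch, channels)
            return self.proj(pooled).unsqueeze(1)   # (batch, 1, 2048) "modal tokens"

    bert = BertModel(BertConfig())
    config = SimpleNamespace(**vars(bert.config))
    config.modal_hidden_size = 2048
    model = MMBTModel(config, bert, ToyImageEncoder())

    images = torch.randn(2, 3, 224, 224)
    input_ids = torch.randint(0, bert.config.vocab_size, (2, 8))
    outputs = model(input_modal=images, input_ids=input_ids, return_dict=True)
    # Modal and text tokens are concatenated along the sequence dimension.
    print(outputs.last_hidden_state.shape)  # (2, 1 + 8, hidden_size)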
\"\"\" import torch from torch import nn from torch.nn", "embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob)", "and inputs_embeds is not None: raise ValueError(\"You cannot specify both", "encoder): super().__init__() self.config = config self.transformer = transformer self.modal_encoder =", "= self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output,", "if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long,", "inputs_embeds\") device = input_ids.device if input_ids is not None else", "Classification (or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape", "**attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one for", "each layer plus the initial embedding outputs. **attentions**: (*optional*, returned", "are **not masked**, - 0 for tokens that are **masked**.", "is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on", "IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):", "its affiliates. # Copyright (c) HuggingFace Inc. team. # #", "e.g. With an Image Encoder, the shape would be (batch_size,", "Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of", "of shape `(batch_size, sequence_length)`: Mask to avoid performing attention on", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "the library implements for all its model (such as downloading", "of each layer + the output of the embeddings) of", "pooler attributes. encoder (:class: *~nn.Module*): Encoder for the second modality.", "plain tuple. \"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw", "self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward(", "either input_ids or inputs_embeds\") device = input_ids.device if input_ids is", "+ position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings)", "`(batch_size, sequence_length)`: Segment token indices to indicate first and second", "the self-attention modules. Mask values selected in `[0, 1]`: -", "selected heads of the self-attention modules. Mask values selected in", "loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels),", "it as a regular PyTorch Module and refer to the", "Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`:", "generic methods the library implements for all its model (such", "subclass. Use it as a regular PyTorch Module and refer", "With an Image Encoder, the shape would be (batch_size, channels,", "model that fuses information from text and other image encoders,", "to general usage and behavior. 
Parameters: config ([`MMBTConfig`]): Model configuration", "= ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal,", "tokens will be summed with the respective token embeddings for", "raw hidden-states without any specific head on top.\", MMBT_START_DOCSTRING, )", "1]`. If `config.num_labels == 1` a regression loss is computed", "of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of", "and refer to the PyTorch documentation for all matter related", "the weights associated with the model, only the configuration. transformer", "start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1)", "= embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout =", "model inherits from [`PreTrainedModel`]. Check the superclass documentation for the", "get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings(", "[torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and", "at the same time\") elif input_ids is not None: input_txt_shape", "return a [`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\" @add_start_docstrings(", "torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is", ") position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings", "*optional*): Optionally, instead of passing `input_ids` you can choose to", "token to be added to Other Modality Embedding. [SEP] Most", "indicate different portions of the non-text modality. The embeddings from", "encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output", "position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict", "other image encoders, and obtain state-of-the-art performance on various multimodal", "= nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal,", "`[0, 1]`: - 1 indicates the head is **not masked**,", "use this file except in compliance with the License. #", "device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids", "def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model", "dim=1 ) if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device)", "n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor`", "head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`,", "shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on", "added to Other Modality Embedding. 
[CLS] Most commonly used for", "in the self-attention heads. Examples: ```python # For example purposes.", "not None: raise ValueError(\"You cannot specify both input_ids and inputs_embeds", "token indices of the encoder input. This mask is used", "modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token indices", "`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary.", "super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout", "- 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape", "def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value", "hidden states of all layers. See `hidden_states` under returned tensors", "- 1 indicates the head is **not masked**, - 0", "token indices. Mask values selected in `[0, 1]`: - 1", "class with all the parameters of the model. Initializing with", "1]`. [What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size,", "= input_ids.size() elif inputs_embeds is not None: input_txt_shape = inputs_embeds.size()[:-1]", "BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings", "position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices", "config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list", "the shape would be (batch_size, channels, height, width) input_ids (`torch.LongTensor`", "as it's appended to the end of other modality embeddings.", "input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1)", "regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)`", "governing permissions and # limitations under the License. \"\"\"PyTorch MMBT", "This model inherits from [`PreTrainedModel`]. Check the superclass documentation for", "indicate first and second portions of the inputs. Indices are", "MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import", "config file does not load the weights associated with the", "input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds,", "embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape", "in compliance with the License. 
# You may obtain a", "is not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have", "software # distributed under the License is distributed on an", "super().__init__() self.config = config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config,", "= self.classifier(pooled_output) loss = None if labels is not None:", "seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids", "[What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*):", "attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ):", "= torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask", "to be added to Other Modality Embedding. [SEP] Most commonly", "of shape `(batch_size, sequence_length)`: Segment token indices to indicate first", "torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids = torch.arange(seq_length,", "None else self.config.use_return_dict if input_ids is not None and inputs_embeds", "on padding token indices. Mask values selected in `[0, 1]`:", "last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def", "encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid", "= return_dict if return_dict is not None else self.config.use_return_dict if", "(`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens", "in the range `[0, config.max_position_embeddings - 1]`. [What are position", "same time\") elif input_ids is not None: input_txt_shape = input_ids.size()", "input_txt_shape = input_ids.size() elif inputs_embeds is not None: input_txt_shape =", "`(batch_size, sequence_length)`: Mask to avoid performing attention on padding token", "as a decoder. Mask values selected in `[0, 1]`: -", "(`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of", "values selected in `[0, 1]`: - 1 indicates the head", "self.num_labels == 1: # We are doing regression loss_fct =", "shape `(batch_size,)`, *optional*): Optional start token to be added to", "start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings],", "input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence", "MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels)", "added as it's appended to the end of other modality", "self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with a sequence", "None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings", "the vocabulary. 
It does not expect [CLS] token to be", "regression loss is computed (Mean-Square loss), If `config.num_labels > 1`", "= r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The", "None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids,", "transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer,", "attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output =", "with the License. # You may obtain a copy of", "nullify selected heads of the self-attention modules. Mask values selected", "softmax, used to compute the weighted average in the self-attention", "for the output of each layer + the output of", "None: input_txt_shape = input_ids.size() elif inputs_embeds is not None: input_txt_shape", "encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder)", "token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings =", "avoid performing attention on padding token indices. Mask values selected", "encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output =", "outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None", "should have embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*):", "Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`]", "you can choose to directly pass an embedded representation. This", "used in the cross-attention if the model is configured as", "or saving, resizing the input embeddings, pruning heads etc.) This", "position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if", "to convert `input_ids` indices into associated vectors than the model's", "a plain tuple. \"\"\" @add_start_docstrings( \"The bare MMBT Model outputting", "= value @add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression", "express or implied. # See the License for the specific", "(`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token to be", "[`PreTrainedModel`]. Check the superclass documentation for the generic methods the", "except in compliance with the License. # You may obtain", "be added to Other Modality Embedding. [SEP] Most commonly used.", "is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*):", ") input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids =", "A text transformer that is used by MMBT. It should", "is configured as a decoder. 
encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size,", "_CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes", "any specific head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin):", "modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device)", "@add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None,", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "tokens in the vocabulary. It does not expect [CLS] token", "...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils", "0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask)", "embedded representation. This is useful if you want more control", "None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length)", "inputs_embeds at the same time\") elif input_ids is not None:", "CONDITIONS OF ANY KIND, either express or implied. # See", "[What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size,", "from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling,", "token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings", "@replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None,", "return_dict=None, ): r\"\"\" Returns: Examples:: # For example purposes. Not", "lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):", "Mask values selected in `[0, 1]`: - 1 for tokens", "def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt", "or not to return a [`~file_utils.ModelOutput`] instead of a plain", "pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss", "computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on the", "the model at the output of each layer plus the", "attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if", "encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: #", "MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss", "= self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\"", "return the attentions tensors of all attention layers. See `attentions`", "added to Other Modality Embedding. [SEP] Most commonly used. attention_mask", "the parameters of the model. 
Initializing with a config file", "`(batch_size, ***)`): The other modality data. It will be the", "else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 )", "BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils import logging", "= inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify either input_ids", "text and other image encoders, and obtain state-of-the-art performance on", "downloading or saving, resizing the input embeddings, pruning heads etc.)", "layer on top of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING,", "head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict if return_dict", "performing attention on padding token indices. Mask values selected in", "with a config file does not load the weights associated", "Whether or not to return the hidden states of all", "self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None):", "is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if", "input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs", "class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__() self.config", "head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns:", "It should take in a batch of modal inputs and", "the superclass documentation for the generic methods the library implements", "r\"\"\" MMBT model was proposed in [Supervised Multimodal Bitransformers for", "Optional start token to be added to Other Modality Embedding.", "class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels", "of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each", "Embeddings which takes in an encoder, and a transformer embedding.\"\"\"", "affiliates. # Copyright (c) HuggingFace Inc. team. # # Licensed", "not None else self.config.use_return_dict if input_ids is not None and", "MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in [Supervised Multimodal", "inherits from [`PreTrainedModel`]. 
Check the superclass documentation for the generic", "encoder, and a transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings):", "modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output =", "labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))", "None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is", "(Mean-Square loss), If `config.num_labels > 1` a classification loss is", "batch of modal inputs and return k, n dimension embeddings.", "indicates the head is **not masked**, - 0 indicates the", "self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None", "convert `input_ids` indices into associated vectors than the model's internal", ") return_dict = return_dict if return_dict is not None else", "= self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1)", "for that type expects. e.g. With an Image Encoder, the", "in `[0, 1]`: - 1 for tokens that are **not", "multimodal classification benchmark tasks. This model inherits from [`PreTrainedModel`]. Check", "not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings =", "to compute the weighted average in the self-attention heads. Examples:", "Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>,", "CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output", "be added as it's appended to the end of other", "None if labels is not None: if self.num_labels == 1:", "selected in `[0, 1]`: - 1 indicates the head is", "of all layers. See `hidden_states` under returned tensors for more", "pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, )", "ValueError(\"You have to specify either input_ids or inputs_embeds\") device =", "a regression loss is computed (Mean-Square loss), If `config.num_labels >", "depending on the configuration (config) and inputs: **loss**: (*optional*, returned", "in an encoder, and a transformer embedding.\"\"\" def __init__(self, config,", "indices to indicate different portions of the non-text modality. The", "`(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the", "not None and inputs_embeds is not None: raise ValueError(\"You cannot", "position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape", "head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask,", "shape `(batch_size,)`: Labels for computing the sequence classification/regression loss. Indices", "(Cross-Entropy). Returns: *Tuple* comprising various elements depending on the configuration", "None and inputs_embeds is not None: raise ValueError(\"You cannot specify", "pruning heads etc.) 
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module)", "sequence tokens in the position embeddings. Selected in the range", "start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1", "will be summed with the respective token embeddings for the", "(before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor`", "= outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss =", "self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[2:]", "@add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states without any", "*optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of", "Copyright (c) Facebook, Inc. and its affiliates. # Copyright (c)", "= nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings", "None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids,", "return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output,", "1 indicates the head is **not masked**, - 0 indicates", "# Copyright (c) Facebook, Inc. and its affiliates. # Copyright", "are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of", "not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings =", "is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat(", "token indices to indicate different portions of the non-text modality.", "1` a classification loss is computed (Cross-Entropy). Returns: *Tuple* comprising", "are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`:", "the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\"", "the model. Initializing with a config file does not load", "embeddings from these tokens will be summed with the respective", "implements for all its model (such as downloading or saving,", "torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask],", "if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "input_ids is not None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal,", "= self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if labels", "input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None,", "initial embedding outputs. 
**attentions**: (*optional*, returned when `output_attentions=True`) list of", "attention softmax, used to compute the weighted average in the", "instead of passing `input_ids` you can choose to directly pass", "portions of the inputs. Indices are selected in `[0, 1]`:", "outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels =", "__init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt =", "config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of", "= ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions =", "indices into associated vectors than the model's internal embedding lookup", "from text and other image encoders, and obtain state-of-the-art performance", "tuple. \"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states", "token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None,", "output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states", "[What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or", "token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings =", "logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic", "...modeling_utils import ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__)", "= self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings", "Version 2.0 (the \"License\"); # you may not use this", "[`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\" @add_start_docstrings( \"The bare", "If `config.num_labels == 1` a regression loss is computed (Mean-Square", "for the non-text modality. Selected in the range `[0, config.max_position_embeddings", "are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size,", "MMBT. It should have embeddings, encoder, and pooler attributes. encoder", "used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to", "if labels is not None: if self.num_labels == 1: #", "= BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder)", "under the License. \"\"\"PyTorch MMBT model. \"\"\" import torch from", "modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None,", "Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape", "**not masked**, - 0 for tokens that are **masked**. [What", "Inc. and its affiliates. # Copyright (c) HuggingFace Inc. team.", "multimodal bitransformer model that fuses information from text and other", "config.max_position_embeddings - 1]`. 
[What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of", "by applicable law or agreed to in writing, software #", "is used in the cross-attention if the model is configured", "a *sentence A* token, - 1 corresponds to a *sentence", "head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output)", "`(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at the output", "ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC =", "masked**, - 0 for tokens that are **masked**. [What are", "encoder (:class: *~nn.Module*): Encoder for the second modality. It should", "of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads,", "not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict", "dim=1) if end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length", "of the model. Initializing with a config file does not", "to the end of other modality embeddings. Indices can be", "is useful if you want more control over how to", "self.config = config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder,", "embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask", "be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details.", "= self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1)", "(one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`:", "+ output) if loss is not None else output return", "import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from", "if input_ids is not None else inputs_embeds.device modal_embeddings = self.modal_encoder(", "+= 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is", "the generic methods the library implements for all its model", "return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits =", "transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__() self.config =", "self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier =", "**masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally,", "input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask,", "- 1]`. 
[What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape", "(`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of", "token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict", "applicable law or agreed to in writing, software # distributed", "output_hidden_states = ( output_hidden_states if output_hidden_states is not None else", "configuration class with all the parameters of the model. Initializing", "nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None,", "token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device", "cannot specify both input_ids and inputs_embeds at the same time\")", "obtain state-of-the-art performance on various multimodal classification benchmark tasks. This", "the non-text modality. The embeddings from these tokens will be", "self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions,", "See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids)", "to return a [`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\"", "if the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor`", "matter related to general usage and behavior. Parameters: config ([`MMBTConfig`]):", "for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids", "hidden_size)`: Hidden-states of the model at the output of each", "bare MMBT Model outputting raw hidden-states without any specific head", "position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings +", "Copyright (c) HuggingFace Inc. team. # # Licensed under the", "# You may obtain a copy of the License at", "is not None and inputs_embeds is not None: raise ValueError(\"You", "indices of the encoder input. This mask is used in", "`(batch_size, sequence_length)`, *optional*): Indices of positions of each input sequence", "SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one", "B* token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor`", "`(batch_size, modal_sequence_length)`: Segment token indices to indicate different portions of", "(*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing the sequence", "`torch.FloatTensor` (one for the output of each layer + the", "is not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings", "on various multimodal classification benchmark tasks. 
This model inherits from", "if output_hidden_states is not None else self.config.output_hidden_states ) return_dict =", "ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids,", "(*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices to", "specify both input_ids and inputs_embeds at the same time\") elif", "= nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None,", "information from text and other image encoders, and obtain state-of-the-art", "all its model (such as downloading or saving, resizing the", "embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm", "end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is", "to specify either input_ids or inputs_embeds\") device = input_ids.device if", "data. It will be the shape that the encoder for", "obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What", "transformer, encoder): super().__init__() self.config = config self.transformer = transformer self.modal_encoder", "ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes in an encoder, and", "if the model is configured as a decoder. Mask values", "from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\"", "output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For example purposes.", "BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs", "position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0),", "each input sequence tokens in the position embeddings for the", "output of the last layer of the encoder. Used in", "indices to indicate first and second portions of the inputs.", "= MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss()", "model is configured as a decoder. Mask values selected in", "Parameters: config ([`MMBTConfig`]): Model configuration class with all the parameters", "of passing `input_ids` you can choose to directly pass an", "def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None,", "return_dict is not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal,", "detail. return_dict (`bool`, *optional*): Whether or not to return a", "attention on the padding token indices of the encoder input.", "model. Initializing with a config file does not load the", "[`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens", "first and second portions of the inputs. Indices are selected", "\"\"\" import torch from torch import nn from torch.nn import", "hidden-states at the output of the last layer of the", "`(1,)`: Classification (or regression if config.num_labels==1) loss. 
**logits**: `torch.FloatTensor` of", "\"The bare MMBT Model outputting raw hidden-states without any specific", "torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss", "of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention", "modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None,", "\"License\"); # you may not use this file except in", "`(batch_size,)`, *optional*): Optional start token to be added to Other", "attention layers. See `attentions` under returned tensors for more detail.", "is configured as a decoder. Mask values selected in `[0,", "\"\"\"Generic Modal Embeddings which takes in an encoder, and a", "range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask", "is not None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens,", "to directly pass an embedded representation. This is useful if", "list of `torch.FloatTensor` (one for the output of each layer", "more control over how to convert `input_ids` indices into associated", "embeddings for the non-text modality. Selected in the range `[0,", "loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct =", "\"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states without", "+ token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings", "Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality", "expects. e.g. With an Image Encoder, the shape would be", "have embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder", "configuration (config) and inputs: **loss**: (*optional*, returned when `labels` is", "shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids`", "also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular", "encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask =", "self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm", "model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels)", "(*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the", "loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits,", "add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils", "the self-attention heads. Examples: ```python # For example purposes. Not", "of the last layer of the encoder. Used in the", "for details. 
[What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape", "are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers,", "is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings(" ]
[ "from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base", "self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']:", ") state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block.", "get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance", "the requested block as specified by block hash. Raise ``BlockNotFound``", "with the given number in the canonical chain. \"\"\" return", "self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK:", "parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self,", "block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state,", ") -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain):", "@abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def", "eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import ( FromBlockParams,", "def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API #", "= self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self)", "BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from", "( ABC, abstractmethod, ) import logging from typing import (", "sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # #", "_from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the", "-> Hash32: pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation:", "Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState,", "block with the given number in the canonical chain. \"\"\"", "import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock,", "parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to", ") chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def", "just persists all state. Should design how to clean up", "not set\") return cls.chaindb_class # # Chain API # @classmethod", "chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) ->", "block_root: Hash32) -> int: \"\"\" Return the score of the", "Each ``StateMachine`` is associated with a range of slots. 
The", "# Block API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:", "block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot)", "Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock)", "\"\"\" Initialize the ``BeaconChain`` from a genesis state. \"\"\" sm_class", "the StateMachines in sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration):", "import ( ABC, abstractmethod, ) import logging from typing import", "number. \"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain classes must", "the ``StateMachine`` instance for the given block slot number. \"\"\"", "`sm_configuration`\" ) else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass", "cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class #", "slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return", "self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block``", "return sm_class raise StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot))", "from the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__)", "Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for the", "block is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else:", "self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s',", "Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO:", ") from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import", "BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from", "``StateMachine`` class for the given block slot number. \"\"\" if", "int: \"\"\" Return the score of the block with the", "-> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32:", "...] chain_id = None # type: int # # Helpers", "bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class", "block with the given hash. Raise ``BlockNotFound`` if there is", "range of slots. The Chain class acts as a wrapper", "import ( ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged,", "chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration = None #", "if slot >= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine available", "Block API # @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:", "chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls,", "the given block slot number. \"\"\" if cls.sm_configuration is None:", "Type[BaseBeaconStateMachine]], ...] 
chain_id = None # type: int # #", "\"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\"", "with an empty `sm_configuration`\" ) else: # TODO implment validate_sm_configuration(self.sm_configuration)", "State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block:", "a combination of one or more ``StateMachine`` classes. Each ``StateMachine``", "block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root:", "base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain``", "Now it just persists all state. Should design how to", "Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params:", "Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for the given block", "Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration:", "def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # #", "at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent(", "-> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:", "are now part of the canonical chain. - a tuple", "were canonical and now are no longer canonical. \"\"\" try:", "- the imported block - a tuple of blocks which", "abstractmethod, ) import logging from typing import ( TYPE_CHECKING, Tuple,", "@abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def", "# Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state:", "eth.exceptions import ( BlockNotFound, ) from eth.validation import ( validate_word,", "( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, )", "def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if", "get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the requested block", "the canonical chain. - a tuple of blocks which were", "get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) ->", "base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass #", "``StateMachine`` is associated with a range of slots. The Chain", "pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod", "get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self,", "Type, ) from eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base", "the given block number. \"\"\" block = self.ensure_block(at_block) sm_class =", "self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # # Block API", "# @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:", "return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock:", "hash. 
Raise ``BlockNotFound`` if there's no block with the given", "the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return", "sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if slot >=", "# Block API # @abstractmethod def get_block_class(self, block_root: Hash32) ->", "return sm_class( chaindb=self.chaindb, block=block, ) # # Block API #", "there's no head defined for the canonical chain. \"\"\" block_root", "block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock,", "ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if it", "def get_block(self) -> BaseBeaconBlock: \"\"\" Return the current TIP block.", "Slot) -> BaseBeaconBlock: \"\"\" Return the block with the given", "an empty `sm_configuration`\" ) else: # TODO implment validate_sm_configuration(self.sm_configuration) #", "in the canonical chain. \"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self,", "noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base", "block descending from the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot", "Slot) -> Hash32: pass @abstractmethod def import_block( self, block: BaseBeaconBlock,", "get_block(self) -> BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\"", "StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block:", "def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the block", "-> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete", "self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) ->", "Return the block with the given number in the canonical", "= cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis", ") base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block =", "if not self.sm_configuration: raise ValueError( \"The Chain class cannot be", "with the given number in the canonical chain. Raise ``BlockNotFound``", "\"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32:", "<filename>eth2/beacon/chains/base.py from abc import ( ABC, abstractmethod, ) import logging", "pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True )", "ABC): \"\"\" The base class for all BeaconChain objects \"\"\"", "\"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # #", "block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\"", "@classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain", "block slot number. \"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain", "logging from typing import ( TYPE_CHECKING, Tuple, Type, ) from", "canonical chain. 
Raise ``BlockNotFound`` if there's no block with the", "return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock)", "not ``None``, otherwise return the block of the canonical head.", "class of the block descending from the given block. \"\"\"", "BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis", "Cannot import block {} before importing \" \"its parent block", "eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB,", "\"\"\" Return the score of the block with the given", "block def get_block(self) -> BaseBeaconBlock: \"\"\" Return the current TIP", "@classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\"", "sm_class.block_class: raise BlockClassError( \"Given genesis block class: {}, StateMachine.block_class: {}\".format(", "encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain", "\"\"\" chaindb = None # type: BaseBeaconChainDB chaindb_class = None", "validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all state.", "@abstractmethod def get_score(self, block_root: Hash32) -> int: pass @abstractmethod def", "-> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:", "# # Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if", "state. Should design how to clean up the old state.", "is not ``None``, otherwise return the block of the canonical", "if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine,", "for the given block slot number. \"\"\" if cls.sm_configuration is", "number in the canonical chain. Raise ``BlockNotFound`` if there's no", "slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block,", "eth_typing import ( Hash32, ) from eth_utils import ( ValidationError,", "canonical chain. \"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock,", "BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base class for", "before importing \" \"its parent block at {}\".format( block.slot, block.signed_root,", "if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists", "block of the canonical head. \"\"\" if block is None:", "BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, )", "class BaseBeaconChain(Configurable, ABC): \"\"\" The base class for all BeaconChain", "no matching black hash. 
\"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block:", "block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the requested block as", "None: raise AttributeError(\"Chain classes must define the StateMachines in sm_configuration\")", "API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block:", "\"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError(", "pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod", "imported block - a tuple of blocks which are now", "BaseBeaconBlock: \"\"\" Return ``block`` if it is not ``None``, otherwise", "slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API #", "no longer canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound:", "-> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod", "block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) #", "the score of the block with the given hash. Raise", "( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, ) from", "self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int:", "import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC):", "'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis block. \"\"\"", "cls(base_db) # # StateMachine API # @classmethod def get_state_machine_class(cls, block:", "\"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class)", "| signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return imported_block, new_canonical_blocks,", "chain. - a tuple of blocks which were canonical and", "= self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, )", "the imported block - a tuple of blocks which are", "at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for", "eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation import (", "base class for all BeaconChain objects \"\"\" chaindb = None", "get_score(self, block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self, block:", "= self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self,", "# noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The", "canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError(", "given hash. Raise ``BlockNotFound`` if there is no matching black", "( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound,", "BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing", "block at the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's", "the block descending from the given block. 
\"\"\" return self.get_state_machine_class_for_block_slot(", "self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block", "BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the", "@abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def", "@abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root:", "Hash32: pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True", ") from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions", "-> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod", ") from eth.exceptions import ( BlockNotFound, ) from eth.validation import", "# @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns", "\"Attempt to import block #{}. Cannot import block {} before", "Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id", "BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def", "block number. \"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return", "'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), )", "slot: Slot) -> Hash32: \"\"\" Return the block hash with", "``BlockNotFound`` if there's no block with the given hash in", "genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db)", "None # type: int # # Helpers # @classmethod @abstractmethod", "is None: raise AttributeError(\"Chain classes must define the StateMachines in", "self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock: \"\"\"", ").create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return", "parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import", "block as specified by block hash. Raise ``BlockNotFound`` if there's", "-> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine':", "now are no longer canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root)", "Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the given block", "else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb =", "def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine``", "raise AttributeError(\"Chain classes must define the StateMachines in sm_configuration\") validate_slot(slot)", "the block of the canonical head. \"\"\" if block is", "-> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis block.", "\"\"\" Return the block at the canonical chain head. 
Raise", "AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain API #", "Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass #", "old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s |", "\"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\"", "Initialize the ``BeaconChain`` from a genesis state. \"\"\" sm_class =", "with the given hash in the db. \"\"\" validate_word(block_root, title=\"Block", "for start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot: return", "block=block, ) # # Block API # def get_block_class(self, block_root:", "return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else", "descending from the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot +", "get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block:", "@abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) ->", "blocks which are now part of the canonical chain. -", "ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) ->", "canonical head. \"\"\" if block is None: head = self.get_canonical_head()", "all BeaconChain objects \"\"\" chaindb = None # type: BaseBeaconChainDB", "TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) ->", "are no longer canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except", "# Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB,", "# State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls,", "given block number. \"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot)", "import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import (", "genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a", "with the given number in the canonical chain. \"\"\" validate_slot(slot)", "slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return", "chain_id = None # type: int # # Helpers #", "BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to the", "get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the block hash", "instantiated with an empty `sm_configuration`\" ) else: # TODO implment", "the given number in the canonical chain. Raise ``BlockNotFound`` if", "all state. 
Should design how to clean up the old", "self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot,", "from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import", "-> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for the given", "raise BlockClassError( \"Given genesis block class: {}, StateMachine.block_class: {}\".format( type(genesis_block),", "cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\"", "slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block with the", "head. \"\"\" if block is None: head = self.get_canonical_head() return", "The Chain class acts as a wrapper around these other", "eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable,", "persists all state. Should design how to clean up the", "validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers", "in the canonical chain. Raise ``BlockNotFound`` if there's no block", "the canonical chain. Raise ``BlockNotFound`` if there's no block with", "cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API", "BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from", ") from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import", "Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class", "# validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers #", "chain. Raise ``BlockNotFound`` if there's no block with the given", "\"\"\" A Chain is a combination of one or more", "chaindb=self.chaindb, block=block, ) # # Block API # def get_block_class(self,", "slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for", "Hash32) -> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) ->", "``StateMachine`` instance for the given block slot number. \"\"\" return", "implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # #", "BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root:", "cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine API #", "if there's no head defined for the canonical chain. \"\"\"", "and now are no longer canonical. \"\"\" try: parent_block =", "BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # #", ") from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from", "self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import block #{}.", "block. 
\"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:", "validate_word, ) from eth_typing import ( Hash32, ) from eth_utils", "not self.sm_configuration: raise ValueError( \"The Chain class cannot be instantiated", "from a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block)", "type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis block class: {},", "# @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None:", "of the canonical chain. - a tuple of blocks which", "{} before importing \" \"its parent block at {}\".format( block.slot,", "= cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine API", "\"Given genesis block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class )", "state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. if", "-> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot)", "block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), )", "set\") return cls.chaindb_class # # Chain API # @classmethod def", "the block at the canonical chain head. Raise ``CanonicalHeadNotFound`` if", "Initialize the ``BeaconChain`` from the genesis block. \"\"\" chaindb =", "BlockNotFound: raise ValidationError( \"Attempt to import block #{}. Cannot import", "None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None #", "there is no matching black hash. \"\"\" return self.chaindb.get_score(block_root) def", "for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine':", "-> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class =", "'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis state. \"\"\"", "@classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) ->", "# @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod", "from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class", "up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) =", "no block with the given hash in the db. \"\"\"", "return cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls,", "FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) ->", "= self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) #", "type: int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls)", "eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import", "get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot:", "as specified by block hash. 
Raise ``BlockNotFound`` if there's no", "there's no block with the given hash in the db.", "1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def", "be instantiated with an empty `sm_configuration`\" ) else: # TODO", "Slot, ) from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING:", "-> 'BaseBeaconChain': pass # # State Machine API # @classmethod", "genesis block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) )", "raise ValidationError( \"Attempt to import block #{}. Cannot import block", "a 3-tuple - the imported block - a tuple of", "-> int: \"\"\" Return the score of the block with", "must define the StateMachines in sm_configuration\") validate_slot(slot) for start_slot, sm_class", "``BlockNotFound`` if there is no matching black hash. \"\"\" return", "Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod @abstractmethod def", "class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination of one", "of the block descending from the given block. \"\"\" return", "= None # type: Type[BaseBeaconChainDB] sm_configuration = None # type:", "otherwise return the block of the canonical head. \"\"\" if", "BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from eth.validation", "to the ``StateMachine`` class of the block descending from the", "to clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks,", "...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and returns", "Slot) -> Hash32: \"\"\" Return the block hash with the", "{}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block,", "canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot)", "block with the given number in the canonical chain. Raise", "eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState,", "import logging from typing import ( TYPE_CHECKING, Tuple, Type, )", "import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import (", "= self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import block", "tuple of blocks which were canonical and now are no", "Raise ``BlockNotFound`` if there's no block with the given number", "operations to the appropriate StateMachine depending on the current block", "\"\"\" if block is None: head = self.get_canonical_head() return self.create_block_from_parent(head,", "return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock:", "from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import (", "block hash with the given number in the canonical chain.", "how to clean up the old state. 
self.chaindb.persist_state(state) ( new_canonical_blocks,", "sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) ->", "pass # # Chain API # @classmethod @abstractmethod def from_genesis(cls,", "Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\") return", "complete block and returns a 3-tuple - the imported block", "BaseBeaconChain(Configurable, ABC): \"\"\" The base class for all BeaconChain objects", "@abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot:", ") = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed", "get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod", "# type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration", "-> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:", ") import logging from typing import ( TYPE_CHECKING, Tuple, Type,", "sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self,", "Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class", "( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, )", "'BaseBeaconChain': pass # # State Machine API # @classmethod @abstractmethod", "def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine``", "now part of the canonical chain. - a tuple of", "blocks which were canonical and now are no longer canonical.", "get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot(", "current block slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class =", "empty `sm_configuration`\" ) else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration)", "block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\"", "the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def", "BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot:", "if there is no matching black hash. 
\"\"\" return self.chaindb.get_score(block_root)", "@abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass", "BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass", "@classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass", "block and returns a 3-tuple - the imported block -", "-> BaseBeaconBlock: \"\"\" Return the requested block as specified by", "and returns a 3-tuple - the imported block - a", "def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod", "logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db:", "abc import ( ABC, abstractmethod, ) import logging from typing", "\"The Chain class cannot be instantiated with an empty `sm_configuration`\"", "if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class", "ValueError( \"The Chain class cannot be instantiated with an empty", "= self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls) ->", "( FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot, )", "around these other StateMachine classes, delegating operations to the appropriate", "the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return", "@abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32: pass @abstractmethod def", "sm_class in reversed(cls.sm_configuration): if slot >= start_slot: return sm_class raise", "pass @abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32: pass @abstractmethod", "perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass", "\"\"\" Return the ``StateMachine`` instance for the given block number.", "Return the current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self,", "return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True )", "None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) ->", "None: if not self.sm_configuration: raise ValueError( \"The Chain class cannot", "BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine`` class of the", "from eth.validation import ( validate_word, ) from eth_typing import (", "def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def", "BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32: pass", "None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain", "block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32)", "# # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]:", "get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not", "the given number in the canonical chain. 
\"\"\" return self.chaindb.get_canonical_block_root(slot)", "number in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def", "genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the", "TODO: Now it just persists all state. Should design how", "Return the ``StateMachine`` class for the given block slot number.", "``None``, otherwise return the block of the canonical head. \"\"\"", "a wrapper around these other StateMachine classes, delegating operations to", "\"\"\" The base class for all BeaconChain objects \"\"\" chaindb", "def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock,", "FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return", "the block with the given number in the canonical chain.", "from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import (", "\"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb,", "get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at the canonical", "( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot", "the ``StateMachine`` class for the given block slot number. \"\"\"", "from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\"", "chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine", "# type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]],", "\"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) ->", "-> BaseBeaconBlock: \"\"\" Return the block with the given number", "as a wrapper around these other StateMachine classes, delegating operations", "@classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise", "sm_class raise StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot)) def", "BaseBeaconBlock: \"\"\" Return the requested block as specified by block", "given number in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot))", "the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no head", "int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass", "with the given hash. Raise ``BlockNotFound`` if there is no", "\"\"\" Return ``block`` if it is not ``None``, otherwise return", "the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root)", "genesis_block.__class__) return cls(base_db) # # StateMachine API # @classmethod def", "``BlockNotFound`` if there's no block with the given number in", "longer canonical. 
\"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise", "\"\"\" Returns the ``StateMachine`` instance for the given block slot", "-> BaseBeaconBlock: \"\"\" Return the block at the canonical chain", "self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def", "signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return imported_block, new_canonical_blocks, old_canonical_blocks", "start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot: return sm_class", "at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls,", "validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa:", ") from eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation", "Hash32) -> int: \"\"\" Return the score of the block", "import ( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, )", "chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block:", "BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the", "from eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base import (", "import ( validate_word, ) from eth_typing import ( Hash32, )", "( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\"", "else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:", "block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if it is", "the block hash with the given number in the canonical", "tuple of blocks which are now part of the canonical", "BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def", "def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self)", "block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at", "BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass", "self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the", "base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize", "the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now", "-> Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod @abstractmethod", "the given hash in the db. \"\"\" validate_word(block_root, title=\"Block Hash\")", "the given hash. 
Raise ``BlockNotFound`` if there is no matching", "pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod", "StateMachine classes, delegating operations to the appropriate StateMachine depending on", "# # Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB,", "typing import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import", "block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def", "TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import ( Configurable, )", "number. \"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class(", "defined for the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class", "BaseBeaconBlock: \"\"\" Return the block at the canonical chain head.", "current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot)", "reversed(cls.sm_configuration): if slot >= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine", "class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb =", "= self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root", "Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot) ->", "slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot:", "if it is not ``None``, otherwise return the block of", "#{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the", "StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states", "slot: Slot) -> Hash32: pass @abstractmethod def import_block( self, block:", "start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine available for block slot:", "\"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None", "cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls, base_db:", "self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock,", "BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from", "BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\" return self.get_state_machine().block", "-> Hash32: \"\"\" Return the block hash with the given", "validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def", ") class BaseBeaconChain(Configurable, ABC): \"\"\" The base class for all", "eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz import (", "pass # # Block API # @abstractmethod def get_block_class(self, block_root:", "# Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass", "of slots. 
The Chain class acts as a wrapper around", "-> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams)", "F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base class", "imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot,", "Block API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot", "def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self,", "BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration = None", "block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block,", "in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root)", "there's no block with the given number in the canonical", "API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\"", "'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the given block", "# Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) #", "logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def", "depending on the current block slot number. \"\"\" logger =", "self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int: \"\"\" Return", "int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) ->", "FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported", "if cls.sm_configuration is None: raise AttributeError(\"Chain classes must define the", "= self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block:", "given block slot number. \"\"\" if cls.sm_configuration is None: raise", "except BlockNotFound: raise ValidationError( \"Attempt to import block #{}. Cannot", "chaindb = None # type: BaseBeaconChainDB chaindb_class = None #", "sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id =", "base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError( \"The", "given number in the canonical chain. Raise ``BlockNotFound`` if there's", "cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis block", "{}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db)", "of blocks which are now part of the canonical chain.", "is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32)", "( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from", "hash. Raise ``BlockNotFound`` if there is no matching black hash.", "\"\"\" Passthrough helper to the ``StateMachine`` class of the block", "number in the canonical chain. \"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block(", "old state. 
self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__)", "slots. The Chain class acts as a wrapper around these", "block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(),", "specified by block hash. Raise ``BlockNotFound`` if there's no block", "slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot)", "block_class) def get_score(self, block_root: Hash32) -> int: \"\"\" Return the", "head defined for the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root()", "block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock:", "\"\"\" Import a complete block and returns a 3-tuple -", ") else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb", "@abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass #", "eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import (", "for the given block number. \"\"\" block = self.ensure_block(at_block) sm_class", "the current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot:", "the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block,", "matching black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None)", "wrapper around these other StateMachine classes, delegating operations to the", "+ 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params)", "pass # # State Machine API # @classmethod @abstractmethod def", "get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class =", "the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot:", "# def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root)", "def get_canonical_block_root(self, slot: Slot) -> Hash32: pass @abstractmethod def import_block(", "( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( #", "eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB,", "class for all BeaconChain objects \"\"\" chaindb = None #", "a tuple of blocks which were canonical and now are", "Hash32: \"\"\" Return the block hash with the given number", "API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState,", "number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type:", "canonical chain. - a tuple of blocks which were canonical", "the canonical head. 
\"\"\" if block is None: head =", "Import a complete block and returns a 3-tuple - the", "self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls)", "to the appropriate StateMachine depending on the current block slot", "helper to the ``StateMachine`` class of the block descending from", "``BeaconChain`` from a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if", "canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return", "A Chain is a combination of one or more ``StateMachine``", "return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return", ">= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine available for block", "pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod def", "Return the ``StateMachine`` instance for the given block number. \"\"\"", "the appropriate StateMachine depending on the current block slot number.", "from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass", "return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int: \"\"\"", "import block {} before importing \" \"its parent block at", "import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock,", "= self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block,", "get_canonical_block_root(self, slot: Slot) -> Hash32: pass @abstractmethod def import_block( self,", "def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class", "chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root,", "import ( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, )", "which were canonical and now are no longer canonical. \"\"\"", "{}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return", "block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock,", "block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the", "genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':", "# @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # #", "these other StateMachine classes, delegating operations to the appropriate StateMachine", "classes must define the StateMachines in sm_configuration\") validate_slot(slot) for start_slot,", "the ``StateMachine`` instance for the given block number. \"\"\" block", "bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import", "returns a 3-tuple - the imported block - a tuple", "( Hash32, ) from eth_utils import ( ValidationError, encode_hex, )", "appropriate StateMachine depending on the current block slot number. 
\"\"\"", "-> None: if not self.sm_configuration: raise ValueError( \"The Chain class", "block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None)", "delegating operations to the appropriate StateMachine depending on the current", "AttributeError(\"Chain classes must define the StateMachines in sm_configuration\") validate_slot(slot) for", "block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None)", "in sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if slot", ") from eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base import", "perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all", "#{}. Cannot import block {} before importing \" \"its parent", "import ( BlockNotFound, ) from eth.validation import ( validate_word, )", "# TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db)", "db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root,", "it just persists all state. Should design how to clean", "from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import", "imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it", "``CanonicalHeadNotFound`` if there's no head defined for the canonical chain.", "self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return", "- a tuple of blocks which are now part of", "cannot be instantiated with an empty `sm_configuration`\" ) else: #", "design how to clean up the old state. self.chaindb.persist_state(state) (", "block #{}. Cannot import block {} before importing \" \"its", ") # # Block API # def get_block_class(self, block_root: Hash32)", "the block with the given hash. Raise ``BlockNotFound`` if there", "genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State", "@classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass", "= sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams)", "# type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type:", "pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod", "\"\"\" Return the requested block as specified by block hash.", "hash with the given number in the canonical chain. 
Raise", "chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine API # @classmethod", "no block with the given number in the canonical chain.", ") -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a", "validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\"", "Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain", "-> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis state.", "state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise", "\"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt", "Tuple, Type, ) from eth._utils.datatypes import ( Configurable, ) from", "\"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True", "Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block", "is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return", "BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) -> int: pass", "BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot, ) from", "canonical and now are no longer canonical. \"\"\" try: parent_block", "instance for the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot)", "block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock:", "Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state:", "type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db,", "block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for", "def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def", "validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot:", ") ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block", "= None # type: int # # Helpers # @classmethod", "import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB,", "self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root:", "block. 
\"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is", "title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self)", "def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block", "Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is", "Chain class cannot be instantiated with an empty `sm_configuration`\" )", "pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']:", "Passthrough helper to the ``StateMachine`` class of the block descending", "a range of slots. The Chain class acts as a", "the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if", "= self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) ->", "acts as a wrapper around these other StateMachine classes, delegating", "from eth.exceptions import ( BlockNotFound, ) from eth.validation import (", "pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self,", "genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API", "slot >= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine available for", "if there's no block with the given hash in the", "class acts as a wrapper around these other StateMachine classes,", "objects \"\"\" chaindb = None # type: BaseBeaconChainDB chaindb_class =", "block with the given hash in the db. \"\"\" validate_word(block_root,", "TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, )", "if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self,", "block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just", "cls.sm_configuration is None: raise AttributeError(\"Chain classes must define the StateMachines", "def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self)", "combination of one or more ``StateMachine`` classes. Each ``StateMachine`` is", "Return the block at the canonical chain head. 
Raise ``CanonicalHeadNotFound``", "def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at the", "( BlockNotFound, ) from eth.validation import ( validate_word, ) from", "sm_class( chaindb=self.chaindb, block=block, ) # # Block API # def", "return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the", "def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the requested", "``StateMachine`` class of the block descending from the given block.", "self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root),", "@abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API", "self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) #", "-> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\"", "classes, delegating operations to the appropriate StateMachine depending on the", "__init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError(", "Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod def get_block_class(self,", "'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) ->", "block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32)", "@classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the", "cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB,", "Chain class acts as a wrapper around these other StateMachine", "type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int", "create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper", "return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return", "pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self,", "at the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no", "BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API #", "parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import", "head. Raise ``CanonicalHeadNotFound`` if there's no head defined for the", "get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block with", "# # Block API # def get_block_class(self, block_root: Hash32) ->", "StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:", "def get_score(self, block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self,", "Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A", "return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']:", "chain head. 
Raise ``CanonicalHeadNotFound`` if there's no head defined for", "from eth_typing import ( Hash32, ) from eth_utils import (", "return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return the current", "define the StateMachines in sm_configuration\") validate_slot(slot) for start_slot, sm_class in", "class cannot be instantiated with an empty `sm_configuration`\" ) else:", "available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) ->", "@abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) ->", ") from eth_typing import ( Hash32, ) from eth_utils import", "requested block as specified by block hash. Raise ``BlockNotFound`` if", "Return the block hash with the given number in the", "self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) ->", "BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis", "Chain is a combination of one or more ``StateMachine`` classes.", "( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, ) from", "Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) ->", "self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the", "- a tuple of blocks which were canonical and now", "= self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class", "block at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import =", "parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the", "the canonical chain. \"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self, block:", "( BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot, )", "BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) ->", "is a combination of one or more ``StateMachine`` classes. Each", "block - a tuple of blocks which are now part", "sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block)", "-> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the given", "part of the canonical chain. - a tuple of blocks", "BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine", "clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, )", "for all BeaconChain objects \"\"\" chaindb = None # type:", "block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params:", "from the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1", "( validate_word, ) from eth_typing import ( Hash32, ) from", "block slot number. 
\"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB", "= self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\"", "get_score(self, block_root: Hash32) -> int: \"\"\" Return the score of", "black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) ->", "Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int #", "# # Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db:", "None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block", "# @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock)", "the ``StateMachine`` class of the block descending from the given", "Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a", ") from eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz", "one or more ``StateMachine`` classes. Each ``StateMachine`` is associated with", "from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import (", "BeaconChain objects \"\"\" chaindb = None # type: BaseBeaconChainDB chaindb_class", "from eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz import", "is None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # #", "= logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self,", "return cls(base_db) # # StateMachine API # @classmethod def get_state_machine_class(cls,", "cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) ->", "block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls,", "StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state)", "score of the block with the given hash. Raise ``BlockNotFound``", "from typing import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes", "...]]: \"\"\" Import a complete block and returns a 3-tuple", ") from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks", "-> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the given", "( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import ( Configurable,", "pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock:", "sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given", "# type: int # # Helpers # @classmethod @abstractmethod def", "which are now part of the canonical chain. - a", "def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':", "-> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) -> int:", "slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB #", "classes. 
Each ``StateMachine`` is associated with a range of slots.", "# type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if", "state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug(", "eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound,", "self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...],", "StateMachine depending on the current block slot number. \"\"\" logger", "block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine``", "Return the score of the block with the given hash.", "# # State Machine API # @classmethod @abstractmethod def get_state_machine_class(", "``StateMachine`` instance for the given block number. \"\"\" block =", "\" \"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, )", "def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot)", ") ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod", "# # StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock)", "more ``StateMachine`` classes. Each ``StateMachine`` is associated with a range", "else: return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return the", "of blocks which were canonical and now are no longer", "import ( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, )", "with a range of slots. The Chain class acts as", "BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]:", "eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import (", "\"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB]", "get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance", "chain. \"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation:", "of one or more ``StateMachine`` classes. Each ``StateMachine`` is associated", ") from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import", "try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to", "...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination", "a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) !=", "for the canonical chain. 
\"\"\" block_root = self.chaindb.get_canonical_head_root() block_class =", "Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass", "new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s", "genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain``", "...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is", "validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod", "# # Block API # @abstractmethod def get_block_class(self, block_root: Hash32)", "FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine`` class", "ABC, abstractmethod, ) import logging from typing import ( TYPE_CHECKING,", "BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass", "block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\"", "if block is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams())", "StateMachine available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None)", "by block hash. Raise ``BlockNotFound`` if there's no block with", "pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination of", "``StateMachine`` classes. Each ``StateMachine`` is associated with a range of", "# StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) ->", "no head defined for the canonical chain. \"\"\" block_root =", "in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self,", "self.sm_configuration: raise ValueError( \"The Chain class cannot be instantiated with", "``BeaconChain`` from the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block,", "Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) ->", "\"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain classes must define", "is associated with a range of slots. The Chain class", "given block. 
\"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot", "import ( FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot,", "get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block", "\"\"\" Return the ``StateMachine`` class for the given block slot", "-> BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine`` class of", "Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and", "3-tuple - the imported block - a tuple of blocks", "pass @abstractmethod def get_score(self, block_root: Hash32) -> int: pass @abstractmethod", "-> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\")", "return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return", "@classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock)", "head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def", "# TODO: Now it just persists all state. Should design", "to import block #{}. Cannot import block {} before importing", "for the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod", "raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain API", "Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod", "it is not ``None``, otherwise return the block of the", "BlockNotFound, ) from eth.validation import ( validate_word, ) from eth_typing", "def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self,", "Returns the ``StateMachine`` instance for the given block slot number.", "a tuple of blocks which are now part of the", "BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if it is not", "type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...]", "\"\"\" Return the block hash with the given number in", "given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot(", "block {} before importing \" \"its parent block at {}\".format(", "base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block)", "= None # type: BaseBeaconChainDB chaindb_class = None # type:", "the given number in the canonical chain. \"\"\" validate_slot(slot) return", "importing \" \"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root,", "None # type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot,", "create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def", "def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize", "associated with a range of slots. 
The Chain class acts", "BaseBeaconBlock: \"\"\" Return the block with the given number in", "-> BaseBeaconBlock: \"\"\" Return ``block`` if it is not ``None``,", "ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from", "cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block:", "ValidationError( \"Attempt to import block #{}. Cannot import block {}", "import block #{}. Cannot import block {} before importing \"", "slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return", "number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot)", "BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination of one or", "BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None:", "raise StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot)) def get_state_machine(self,", "get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) ->", "Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and returns a", "from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import (", "Hash32, ) from eth_utils import ( ValidationError, encode_hex, ) from", "Hash32) -> BaseBeaconBlock: \"\"\" Return the requested block as specified", "BlockClassError( \"Given genesis block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class", "chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB)", "BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError( \"The Chain", "# Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class", "= None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None", "cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class", "TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) #", "Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions", "def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class`", "Return the requested block as specified by block hash. Raise", "the ``BeaconChain`` from a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot)", "# @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block:", "canonical chain head. 
Raise ``CanonicalHeadNotFound`` if there's no head defined", "API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot =", "block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock:", "-> Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod def", "import ( BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot,", "def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32)", ") if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401", "from eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation import", "Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass", "BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the", "of the block with the given hash. Raise ``BlockNotFound`` if", "is no matching black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self,", "\"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) )", "if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis block class:", "validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from", "block hash. Raise ``BlockNotFound`` if there's no block with the", "Raise ``BlockNotFound`` if there's no block with the given hash", "given number in the canonical chain. \"\"\" return self.chaindb.get_canonical_block_root(slot) def", "Raise ``BlockNotFound`` if there is no matching black hash. \"\"\"", "@classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return", "hash in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class =", ") from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import", "given hash in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class", "in reversed(cls.sm_configuration): if slot >= start_slot: return sm_class raise StateMachineNotFound(\"No", "the current block slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class", "type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration =", "def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the", "API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) ->", "eth.validation import ( validate_word, ) from eth_typing import ( Hash32,", "perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\"", "= BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) ->", "( ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, )", "The base class for all BeaconChain objects \"\"\" chaindb =", "FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot, ) if", "slot number. 
\"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain classes", "%s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return imported_block,", "= self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # # Block", "from abc import ( ABC, abstractmethod, ) import logging from", "block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb", "other StateMachine classes, delegating operations to the appropriate StateMachine depending", "the ``BeaconChain`` from the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db)", "hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:", "\"\"\" Return the current TIP block. \"\"\" return self.get_state_machine().block def", "def __init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise", "self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block)", "Raise ``CanonicalHeadNotFound`` if there's no head defined for the canonical", "instance for the given block number. \"\"\" block = self.ensure_block(at_block)", "def get_score(self, block_root: Hash32) -> int: \"\"\" Return the score", "import ( Hash32, ) from eth_utils import ( ValidationError, encode_hex,", "-> BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\" return", "= cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db:", "if there's no block with the given number in the", ") from eth.validation import ( validate_word, ) from eth_typing import", "raise ValueError( \"The Chain class cannot be instantiated with an", "def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough", "None # type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB]", "``block`` if it is not ``None``, otherwise return the block", "\"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class)", "imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. if perform_validation:", "a complete block and returns a 3-tuple - the imported", "on the current block slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\")", "get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine``", "get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) ->", "of the canonical head. \"\"\" if block is None: head", "or more ``StateMachine`` classes. Each ``StateMachine`` is associated with a", "class for the given block slot number. \"\"\" if cls.sm_configuration", "!= sm_class.block_class: raise BlockClassError( \"Given genesis block class: {}, StateMachine.block_class:", "genesis state. 
\"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class:", "self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate", "return the block of the canonical head. \"\"\" if block", "block) # TODO: Now it just persists all state. Should", "\"\"\" Initialize the ``BeaconChain`` from the genesis block. \"\"\" chaindb", "Should design how to clean up the old state. self.chaindb.persist_state(state)", "import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError,", "type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if not", "@abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod", "\"\"\" Return the block with the given number in the", "StateMachines in sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if", "@abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod", "@abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def", "API # @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass", "Return ``block`` if it is not ``None``, otherwise return the" ]
[ "send command to the device device_access.send(\"ter len 0\\nshow run \\n\")", "FOR SINGLE COMMAND, FOR MULTIPLE COMMANDS CODE BELOW # send", "client.invoke_shell() for line in f2: device_access.send(line) time.sleep(1) output = device_access.recv(55000).decode('ascii')", "= input('Provide IP address to connect to: ') user =", "f1 = open(\"devices.txt\",\"r\") f2 = open(\"commands.txt\",\"r\") for line in f1:", "find available options. # auto adjust host key verification with", "device_access.send(\"ter len 0\\nshow run \\n\") time.sleep(2) # receive output from", "# Taking input from files f1 = open(\"devices.txt\",\"r\") f2 =", "#using as SSH Client client = paramiko.SSHClient() # check dir(client)", "format and print it print(device_access.recv(550000).decode('ascii')) # We can print the", "# We can print the same to a file too", "device, convert it to byte-like format and print it print(device_access.recv(550000).decode('ascii'))", "output = device_access.recv(55000).decode('ascii') f3.write(output) \"\"\" THIS CODE IS FOR SINGLE", "open(\"devices.txt\",\"r\") f2 = open(\"commands.txt\",\"r\") for line in f1: client =", "# send command to the device device_access.send(\"ter len 0\\nshow run", "data[1] pwd = data[2] f3 = open(addr+\".txt\",\"w+\") # print(addr +\"", "print it print(device_access.recv(550000).decode('ascii')) # We can print the same to", "\"+ user +\" \" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we have to", "COMMAND, FOR MULTIPLE COMMANDS CODE BELOW # send command to", "pwd = data[2] f3 = open(addr+\".txt\",\"w+\") # print(addr +\" \"+", "# check dir(client) to find available options. # auto adjust", "convert it to byte-like format and print it print(device_access.recv(550000).decode('ascii')) #", "addr = input('Provide IP address to connect to: ') user", "data[2] f3 = open(addr+\".txt\",\"w+\") # print(addr +\" \"+ user +\"", "to remote Cisco IOS \"\"\" Manually taking input addr =", "connect to: ') user = input('Username: ') pwd = <PASSWORD>('Password:", "time.sleep(1) output = device_access.recv(55000).decode('ascii') f3.write(output) \"\"\" THIS CODE IS FOR", "client = paramiko.SSHClient() # check dir(client) to find available options.", "IP address to connect to: ') user = input('Username: ')", "device device_access.send(\"ter len 0\\nshow run \\n\") time.sleep(2) # receive output", "have to ask for Shell device_access = client.invoke_shell() for line", "# print(data) addr = data[0] user = data[1] pwd =", "THIS CODE IS FOR SINGLE COMMAND, FOR MULTIPLE COMMANDS CODE", "input from files f1 = open(\"devices.txt\",\"r\") f2 = open(\"commands.txt\",\"r\") for", "\\n\") time.sleep(2) # receive output from the device, convert it", "available options. # auto adjust host key verification with yes", "= paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(\" \") # print(data) addr", "+\" \"+ user +\" \" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we have", "time for connecting to remote Cisco IOS \"\"\" Manually taking", "= <PASSWORD>('Password: ')\"\"\" # Taking input from files f1 =", "dir(client) to find available options. 
# auto adjust host key", "= device_access.recv(55000).decode('ascii') f3.write(output) \"\"\" THIS CODE IS FOR SINGLE COMMAND,", "CODE IS FOR SINGLE COMMAND, FOR MULTIPLE COMMANDS CODE BELOW", "CODE BELOW # send command to the device device_access.send(\"ter len", "the same to a file too with open(\"csr1000v.txt\",\"w\") as f:", "# we have to ask for Shell device_access = client.invoke_shell()", "device_access = client.invoke_shell() for line in f2: device_access.send(line) time.sleep(1) output", "client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we have to ask for Shell device_access =", "input('Username: ') pwd = <PASSWORD>('Password: ')\"\"\" # Taking input from", "Manually taking input addr = input('Provide IP address to connect", "Shell device_access = client.invoke_shell() for line in f2: device_access.send(line) time.sleep(1)", "len 0\\nshow run \\n\") time.sleep(2) # receive output from the", "We can print the same to a file too with", "open(\"commands.txt\",\"r\") for line in f1: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data", "= data[1] pwd = data[2] f3 = open(addr+\".txt\",\"w+\") # print(addr", "print(data) addr = data[0] user = data[1] pwd = data[2]", "ask for Shell device_access = client.invoke_shell() for line in f2:", "print(device_access.recv(550000).decode('ascii')) # We can print the same to a file", "run \\n\") time.sleep(2) # receive output from the device, convert", "remote Cisco IOS \"\"\" Manually taking input addr = input('Provide", "client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for connecting to remote Cisco IOS \"\"\"", "line in f2: device_access.send(line) time.sleep(1) output = device_access.recv(55000).decode('ascii') f3.write(output) \"\"\"", "= line.split(\" \") # print(data) addr = data[0] user =", "+\" \" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we have to ask for", "check dir(client) to find available options. 
# auto adjust host", "pwd = <PASSWORD>('Password: ')\"\"\" # Taking input from files f1", "print the same to a file too with open(\"csr1000v.txt\",\"w\") as", "client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(\" \") # print(data) addr = data[0]", "# receive output from the device, convert it to byte-like", "') pwd = <PASSWORD>('Password: ')\"\"\" # Taking input from files", "receive output from the device, convert it to byte-like format", "output from the device, convert it to byte-like format and", "client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(\" \") # print(data)", "= input('Username: ') pwd = <PASSWORD>('Password: ')\"\"\" # Taking input", "import paramiko,time #using as SSH Client client = paramiko.SSHClient() #", "FOR MULTIPLE COMMANDS CODE BELOW # send command to the", "to byte-like format and print it print(device_access.recv(550000).decode('ascii')) # We can", "in f1: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(\" \")", "= data[0] user = data[1] pwd = data[2] f3 =", "#!/usr/local/bin/python3 import paramiko,time #using as SSH Client client = paramiko.SSHClient()", "\") # print(data) addr = data[0] user = data[1] pwd", "yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for connecting to remote", "Cisco IOS \"\"\" Manually taking input addr = input('Provide IP", "f2 = open(\"commands.txt\",\"r\") for line in f1: client = paramiko.SSHClient()", "device_access.recv(55000).decode('ascii') f3.write(output) \"\"\" THIS CODE IS FOR SINGLE COMMAND, FOR", "') user = input('Username: ') pwd = <PASSWORD>('Password: ')\"\"\" #", "# auto adjust host key verification with yes or no", "COMMANDS CODE BELOW # send command to the device device_access.send(\"ter", "as SSH Client client = paramiko.SSHClient() # check dir(client) to", "for Shell device_access = client.invoke_shell() for line in f2: device_access.send(line)", "to find available options. 
# auto adjust host key verification", "\" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we have to ask for Shell", "data = line.split(\" \") # print(data) addr = data[0] user", "auto adjust host key verification with yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy())", "from files f1 = open(\"devices.txt\",\"r\") f2 = open(\"commands.txt\",\"r\") for line", "# print(addr +\" \"+ user +\" \" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) #", "with yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for connecting to", "user = input('Username: ') pwd = <PASSWORD>('Password: ')\"\"\" # Taking", "to: ') user = input('Username: ') pwd = <PASSWORD>('Password: ')\"\"\"", "for line in f1: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data =", "open(addr+\".txt\",\"w+\") # print(addr +\" \"+ user +\" \" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False)", "can print the same to a file too with open(\"csr1000v.txt\",\"w\")", "it print(device_access.recv(550000).decode('ascii')) # We can print the same to a", "addr = data[0] user = data[1] pwd = data[2] f3", "Client client = paramiko.SSHClient() # check dir(client) to find available", "\"\"\" Manually taking input addr = input('Provide IP address to", "# time for connecting to remote Cisco IOS \"\"\" Manually", "MULTIPLE COMMANDS CODE BELOW # send command to the device", "paramiko.SSHClient() # check dir(client) to find available options. # auto", "')\"\"\" # Taking input from files f1 = open(\"devices.txt\",\"r\") f2", "taking input addr = input('Provide IP address to connect to:", "we have to ask for Shell device_access = client.invoke_shell() for", "user = data[1] pwd = data[2] f3 = open(addr+\".txt\",\"w+\") #", "it to byte-like format and print it print(device_access.recv(550000).decode('ascii')) # We", "= open(\"commands.txt\",\"r\") for line in f1: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy())", "user +\" \" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we have to ask", "SINGLE COMMAND, FOR MULTIPLE COMMANDS CODE BELOW # send command", "key verification with yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for", "options. # auto adjust host key verification with yes or", "print(addr +\" \"+ user +\" \" +pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we", "time.sleep(2) # receive output from the device, convert it to", "adjust host key verification with yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) #", "line in f1: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(\"", "= paramiko.SSHClient() # check dir(client) to find available options. 
#", "to ask for Shell device_access = client.invoke_shell() for line in", "verification with yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for connecting", "= data[2] f3 = open(addr+\".txt\",\"w+\") # print(addr +\" \"+ user", "paramiko,time #using as SSH Client client = paramiko.SSHClient() # check", "to the device device_access.send(\"ter len 0\\nshow run \\n\") time.sleep(2) #", "data[0] user = data[1] pwd = data[2] f3 = open(addr+\".txt\",\"w+\")", "+pwd) client.connect(addr,username=user,password=<PASSWORD>,allow_agent=False,look_for_keys=False) # we have to ask for Shell device_access", "paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(\" \") # print(data) addr =", "from the device, convert it to byte-like format and print", "SSH Client client = paramiko.SSHClient() # check dir(client) to find", "device_access.send(line) time.sleep(1) output = device_access.recv(55000).decode('ascii') f3.write(output) \"\"\" THIS CODE IS", "the device, convert it to byte-like format and print it", "0\\nshow run \\n\") time.sleep(2) # receive output from the device,", "IS FOR SINGLE COMMAND, FOR MULTIPLE COMMANDS CODE BELOW #", "no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for connecting to remote Cisco IOS", "f2: device_access.send(line) time.sleep(1) output = device_access.recv(55000).decode('ascii') f3.write(output) \"\"\" THIS CODE", "IOS \"\"\" Manually taking input addr = input('Provide IP address", "input addr = input('Provide IP address to connect to: ')", "f1: client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) data = line.split(\" \") #", "host key verification with yes or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time", "\"\"\" THIS CODE IS FOR SINGLE COMMAND, FOR MULTIPLE COMMANDS", "to connect to: ') user = input('Username: ') pwd =", "for line in f2: device_access.send(line) time.sleep(1) output = device_access.recv(55000).decode('ascii') f3.write(output)", "same to a file too with open(\"csr1000v.txt\",\"w\") as f: f.write(device_access.recv(550000).decode('ascii'))\"\"\"", "= client.invoke_shell() for line in f2: device_access.send(line) time.sleep(1) output =", "the device device_access.send(\"ter len 0\\nshow run \\n\") time.sleep(2) # receive", "input('Provide IP address to connect to: ') user = input('Username:", "<PASSWORD>('Password: ')\"\"\" # Taking input from files f1 = open(\"devices.txt\",\"r\")", "= open(addr+\".txt\",\"w+\") # print(addr +\" \"+ user +\" \" +pwd)", "and print it print(device_access.recv(550000).decode('ascii')) # We can print the same", "BELOW # send command to the device device_access.send(\"ter len 0\\nshow", "Taking input from files f1 = open(\"devices.txt\",\"r\") f2 = open(\"commands.txt\",\"r\")", "byte-like format and print it print(device_access.recv(550000).decode('ascii')) # We can print", "connecting to remote Cisco IOS \"\"\" Manually taking input addr", "in f2: device_access.send(line) time.sleep(1) output = device_access.recv(55000).decode('ascii') f3.write(output) \"\"\" THIS", "f3 = open(addr+\".txt\",\"w+\") # print(addr +\" \"+ user +\" \"", "line.split(\" \") # print(data) addr = data[0] user = data[1]", "files f1 = open(\"devices.txt\",\"r\") f2 = open(\"commands.txt\",\"r\") for line in", "= open(\"devices.txt\",\"r\") f2 = open(\"commands.txt\",\"r\") for line in f1: client", "or no client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) # time for connecting 
to remote Cisco", "command to the device device_access.send(\"ter len 0\\nshow run \\n\") time.sleep(2)", "address to connect to: ') user = input('Username: ') pwd", "f3.write(output) \"\"\" THIS CODE IS FOR SINGLE COMMAND, FOR MULTIPLE", "for connecting to remote Cisco IOS \"\"\" Manually taking input" ]
[ "10 # k = n % 10 # if n", "返回出去的列表 for i in l1: re_l1.append(i) def jisuan(str_num): he1 =", "# 返回出去的列表 for i in l1: re_l1.append(i) def jisuan(str_num): he1", "** 3 + k ** 3: # print(n) # 第一道题(16)", "# l1 = s1.split(' ') # l2 = [] #", "re_l1.append(i) def jisuan(str_num): he1 = 0 global out_l1 for i", "range(400,500): # i = n // 100 # j =", "% 10 # if n == i ** 3 +", "None while 1: in_1 = input(\"请输入数值:\") nums_l1 = in_1.split(' ')", "i = n // 100 # j = n //", "for i in l1: # if i.isdigit(): # l2.append(int(i)) #", "jisuan(str_num): he1 = 0 global out_l1 for i in l1():", "== i ** 3 + j ** 3 + k", "l1: re_l1.append(i) def jisuan(str_num): he1 = 0 global out_l1 for", "re_l1 = [] # 返回出去的列表 for i in l1: re_l1.append(i)", "print(n) # 第一道题(16) # input(\"请输入(第一次):\") # s1 = input(\"请输入(第二次):\") #", "** 3 + j ** 3 + k ** 3:", "# 第一道题(16) # input(\"请输入(第一次):\") # s1 = input(\"请输入(第二次):\") # l1", "input(\"请输入(第二次):\") # l1 = s1.split(' ') # l2 = []", "= s1.split(' ') # l2 = [] # for i", "i ** 3 + j ** 3 + k **", "# 第二道题(17) out_l1 = [] def bian_int_list(l1): re_l1 = []", "10 # if n == i ** 3 + j", "= input(\"请输入(第二次):\") # l1 = s1.split(' ') # l2 =", "3 + j ** 3 + k ** 3: #", "l2: # if not (i % 6): # print(i, end=\"", "for i in l2: # if not (i % 6):", "[] def bian_int_list(l1): re_l1 = [] # 返回出去的列表 for i", "bian_int_list(l1): re_l1 = [] # 返回出去的列表 for i in l1:", "input(\"请输入(第一次):\") # s1 = input(\"请输入(第二次):\") # l1 = s1.split(' ')", "= 0 global out_l1 for i in l1(): he1 +=", "k ** 3: # print(n) # 第一道题(16) # input(\"请输入(第一次):\") #", "in range(400,500): # i = n // 100 # j", "第二道题(17) out_l1 = [] def bian_int_list(l1): re_l1 = [] #", "n // 100 # j = n // 10 %", "l1 = s1.split(' ') # l2 = [] # for", "# for i in l2: # if not (i %", "int(i)**2 if he1 > int(str_num): out_l1.append(str_num) return None while 1:", "# if n == i ** 3 + j **", "n // 10 % 10 # k = n %", "print(i, end=\" \") # 第二道题(17) out_l1 = [] def bian_int_list(l1):", "out_l1.append(str_num) return None while 1: in_1 = input(\"请输入数值:\") nums_l1 =", "for i in l1(): he1 += int(i)**2 if he1 >", "# for i in l1: # if i.isdigit(): # l2.append(int(i))", "n == i ** 3 + j ** 3 +", "s1.split(' ') # l2 = [] # for i in", "0 global out_l1 for i in l1(): he1 += int(i)**2", "l1: # if i.isdigit(): # l2.append(int(i)) # for i in", "// 10 % 10 # k = n % 10", "= n % 10 # if n == i **", "# print(n) # 第一道题(16) # input(\"请输入(第一次):\") # s1 = input(\"请输入(第二次):\")", "# l2.append(int(i)) # for i in l2: # if not", "10 % 10 # k = n % 10 #", "// 100 # j = n // 10 % 10", "def jisuan(str_num): he1 = 0 global out_l1 for i in", "i in l1: re_l1.append(i) def jisuan(str_num): he1 = 0 global", "# i = n // 100 # j = n", "= n // 100 # j = n // 10", "n % 10 # if n == i ** 3", "j = n // 10 % 10 # k =", "# if not (i % 6): # print(i, end=\" \")", "= [] # for i in l1: # if i.isdigit():", "= [] def bian_int_list(l1): re_l1 = [] # 返回出去的列表 for", "int(str_num): out_l1.append(str_num) return None while 1: in_1 = input(\"请输入数值:\") nums_l1", "= [] # 返回出去的列表 for i in l1: re_l1.append(i) def", "return None while 1: in_1 = input(\"请输入数值:\") nums_l1 = in_1.split('", "if he1 > int(str_num): out_l1.append(str_num) return None while 1: in_1", "i in l1(): he1 += int(i)**2 if he1 > int(str_num):", "** 3: # print(n) # 第一道题(16) # input(\"请输入(第一次):\") # s1", "[] # for i in l1: # if i.isdigit(): #", "if not (i % 6): # print(i, end=\" \") #", "+ j ** 3 + k ** 3: # print(n)", "i in l2: # if not (i % 6): #", "def bian_int_list(l1): re_l1 = [] # 返回出去的列表 for i in", "l2.append(int(i)) # for i in l2: # if 
not (i", "in l2: # if not (i % 6): # print(i,", "i.isdigit(): # l2.append(int(i)) # for i in l2: # if", "in l1(): he1 += int(i)**2 if he1 > int(str_num): out_l1.append(str_num)", "6): # print(i, end=\" \") # 第二道题(17) out_l1 = []", "# if i.isdigit(): # l2.append(int(i)) # for i in l2:", "if n == i ** 3 + j ** 3", "he1 > int(str_num): out_l1.append(str_num) return None while 1: in_1 =", "% 10 # k = n % 10 # if", "l2 = [] # for i in l1: # if", "+ k ** 3: # print(n) # 第一道题(16) # input(\"请输入(第一次):\")", "i in l1: # if i.isdigit(): # l2.append(int(i)) # for", "in l1: # if i.isdigit(): # l2.append(int(i)) # for i", "= n // 10 % 10 # k = n", "j ** 3 + k ** 3: # print(n) #", "') # l2 = [] # for i in l1:", "he1 = 0 global out_l1 for i in l1(): he1", "# l2 = [] # for i in l1: #", "第一道题(16) # input(\"请输入(第一次):\") # s1 = input(\"请输入(第二次):\") # l1 =", "3 + k ** 3: # print(n) # 第一道题(16) #", "+= int(i)**2 if he1 > int(str_num): out_l1.append(str_num) return None while", "(i % 6): # print(i, end=\" \") # 第二道题(17) out_l1", "for n in range(400,500): # i = n // 100", "out_l1 = [] def bian_int_list(l1): re_l1 = [] # 返回出去的列表", "# s1 = input(\"请输入(第二次):\") # l1 = s1.split(' ') #", "100 # j = n // 10 % 10 #", "out_l1 for i in l1(): he1 += int(i)**2 if he1", "if i.isdigit(): # l2.append(int(i)) # for i in l2: #", "n in range(400,500): # i = n // 100 #", "l1(): he1 += int(i)**2 if he1 > int(str_num): out_l1.append(str_num) return", "# j = n // 10 % 10 # k", "end=\" \") # 第二道题(17) out_l1 = [] def bian_int_list(l1): re_l1", "# for n in range(400,500): # i = n //", "[] # 返回出去的列表 for i in l1: re_l1.append(i) def jisuan(str_num):", "3: # print(n) # 第一道题(16) # input(\"请输入(第一次):\") # s1 =", "k = n % 10 # if n == i", "% 6): # print(i, end=\" \") # 第二道题(17) out_l1 =", "\") # 第二道题(17) out_l1 = [] def bian_int_list(l1): re_l1 =", "in l1: re_l1.append(i) def jisuan(str_num): he1 = 0 global out_l1", "# k = n % 10 # if n ==", "global out_l1 for i in l1(): he1 += int(i)**2 if", "# print(i, end=\" \") # 第二道题(17) out_l1 = [] def", "> int(str_num): out_l1.append(str_num) return None while 1: in_1 = input(\"请输入数值:\")", "# input(\"请输入(第一次):\") # s1 = input(\"请输入(第二次):\") # l1 = s1.split('", "not (i % 6): # print(i, end=\" \") # 第二道题(17)", "s1 = input(\"请输入(第二次):\") # l1 = s1.split(' ') # l2", "he1 += int(i)**2 if he1 > int(str_num): out_l1.append(str_num) return None", "for i in l1: re_l1.append(i) def jisuan(str_num): he1 = 0" ]
[ "need to run early, so they get a high priority", "long lists, but adds a little complexity and doesn’t really", "default arguments for the alias Dagoba.addPipetype(newname, function() {}) # because", "Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0] != newname) return step", "fun: fun}) } \"\"\" \"\"\" Dagoba.transform = function(program) { return", "class Transformer: def __init__(self): self.T = [] def transform(self, program):", "defaults) { defaults = defaults || [] # default arguments", "function() {}) # because there's no method catchall in js", "but adds a little complexity and doesn’t really speed up", "a note in case this assumption turns out to be", "returns a program, plus a priority level. Higher priority transformers", "they get a high priority } \"\"\" \"\"\" Dagoba.extend =", "list[key] != 'undefined') return acc acc[key] = defaults[key] return acc", "turns out to be false — a binary search is", "{ return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] != 'undefined') return", "} \"\"\" \"\"\" Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc,", "[] # transformers (more than meets the eye) \"\"\" \"\"\"", "= function(newname, oldname, defaults) { defaults = defaults || []", "defaults || [] # default arguments for the alias Dagoba.addPipetype(newname,", "!= newname) return step return [oldname, Dagoba.extend(step[1], defaults)] }) },", "is a function that accepts a program and returns a", "list linearly to add a new one. We’ll leave a", "function(fun, priority) { if(typeof fun != 'function') return Dagoba.error('Invalid transformer", "break Dagoba.T.splice(i, 0, {priority: priority, fun: fun}) } \"\"\" \"\"\"", "\"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults = defaults", "> Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority: priority, fun: fun}) }", "no method catchall in js Dagoba.addTransformer(function(program) { return program.map(function(step) {", "\"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults =", "these need to run early, so they get a high", "program, plus a priority level. Higher priority transformers are placed", "program): return program \"\"\" Dagoba.T = [] # transformers (more", "going to evaluate it later 31 . We’ll assume there", "# OPT: binary search if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i, 0,", "A query transformer is a function that accepts a program", "adds a little complexity and doesn’t really speed up short", "for long lists, but adds a little complexity and doesn’t", "function(program) { return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) }, program)", "Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) }, program) } \"\"\" \"\"\"", "Dagoba.addPipetype(newname, function() {}) # because there's no method catchall in", "it later 31 . 
We’ll assume there won’t be an", "= function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key]", "search is much more time-optimal for long lists, but adds", "a high priority } \"\"\" \"\"\" Dagoba.extend = function(list, defaults)", "< Dagoba.T.length; i++) # OPT: binary search if(priority > Dagoba.T[i].priority)", "meets the eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority) {", "# because there's no method catchall in js Dagoba.addTransformer(function(program) {", "Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] != 'undefined') return acc acc[key]", "\"\"\" Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key) {", "assume there won’t be an enormous number of transformer additions,", "be false — a binary search is much more time-optimal", "\"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) { return", "= function(program) { return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) },", "def __init__(self): self.T = [] def transform(self, program): return program", "Dagoba.addAlias = function(newname, oldname, defaults) { defaults = defaults ||", "get a high priority } \"\"\" \"\"\" Dagoba.extend = function(list,", "transformer function') for(var i = 0; i < Dagoba.T.length; i++)", "acc acc[key] = defaults[key] return acc }, list) } \"\"\"", "because we’re going to evaluate it later 31 . We’ll", "case this assumption turns out to be false — a", "\"\"\" \"\"\" Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key)", "to be false — a binary search is much more", "program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults) {", "binary search is much more time-optimal for long lists, but", "fun}) } \"\"\" \"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc,", "return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) }, program) } \"\"\"", "catchall in js Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0] !=", "defaults = defaults || [] # default arguments for the", "= defaults || [] # default arguments for the alias", "We’ll leave a note in case this assumption turns out", "add a new one. We’ll leave a note in case", "!= 'function') return Dagoba.error('Invalid transformer function') for(var i = 0;", "defaults) { return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] != 'undefined')", "is a function, because we’re going to evaluate it later", "\"\"\" class Transformer: def __init__(self): self.T = [] def transform(self,", "Dagoba.T.splice(i, 0, {priority: priority, fun: fun}) } \"\"\" \"\"\" Dagoba.transform", "transformer is a function that accepts a program and returns", "\"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof fun !=", "lists. \"\"\" class Transformer: def __init__(self): self.T = [] def", "additions, and walk the list linearly to add a new", "Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key) { if(typeof", "self.T = [] def transform(self, program): return program \"\"\" Dagoba.T", "priority transformers are placed closer to the front of the", "= [] # transformers (more than meets the eye) \"\"\"", "out to be false — a binary search is much", "and doesn’t really speed up short lists. 
\"\"\" class Transformer:", "key) { if(typeof list[key] != 'undefined') return acc acc[key] =", "function, because we’re going to evaluate it later 31 .", "We’ll assume there won’t be an enormous number of transformer", "— a binary search is much more time-optimal for long", "return program \"\"\" Dagoba.T = [] # transformers (more than", "transform(self, program): return program \"\"\" Dagoba.T = [] # transformers", "program and returns a program, plus a priority level. Higher", "priority, fun: fun}) } \"\"\" \"\"\" Dagoba.transform = function(program) {", "{ return transformer.fun(acc) }, program) } \"\"\" \"\"\" Dagoba.addAlias =", "the alias Dagoba.addPipetype(newname, function() {}) # because there's no method", "later 31 . We’ll assume there won’t be an enormous", "be an enormous number of transformer additions, and walk the", "a binary search is much more time-optimal for long lists,", "Dagoba.addTransformer = function(fun, priority) { if(typeof fun != 'function') return", "early, so they get a high priority } \"\"\" \"\"\"", "this assumption turns out to be false — a binary", "{ defaults = defaults || [] # default arguments for", "{ if(typeof list[key] != 'undefined') return acc acc[key] = defaults[key]", "transformer additions, and walk the list linearly to add a", "because there's no method catchall in js Dagoba.addTransformer(function(program) { return", "a little complexity and doesn’t really speed up short lists.", "!= 'undefined') return acc acc[key] = defaults[key] return acc },", "0; i < Dagoba.T.length; i++) # OPT: binary search if(priority", "new one. We’ll leave a note in case this assumption", "closer to the front of the list. We’re ensuring is", ". We’ll assume there won’t be an enormous number of", "evaluate it later 31 . We’ll assume there won’t be", "linearly to add a new one. We’ll leave a note", "# transformers (more than meets the eye) \"\"\" \"\"\" Dagoba.addTransformer", "[oldname, Dagoba.extend(step[1], defaults)] }) }, 100) # these need to", "# default arguments for the alias Dagoba.addPipetype(newname, function() {}) #", "a function that accepts a program and returns a program,", "{ if(typeof fun != 'function') return Dagoba.error('Invalid transformer function') for(var", "if(typeof fun != 'function') return Dagoba.error('Invalid transformer function') for(var i", "defaults)] }) }, 100) # these need to run early,", "Higher priority transformers are placed closer to the front of", "transformers (more than meets the eye) \"\"\" \"\"\" Dagoba.addTransformer =", "false — a binary search is much more time-optimal for", "transformers are placed closer to the front of the list.", "much more time-optimal for long lists, but adds a little", "{ return program.map(function(step) { if(step[0] != newname) return step return", "newname) return step return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100)", "}) }, 100) # these need to run early, so", "<filename>graphdb/transformer.py<gh_stars>1-10 \"\"\" A query transformer is a function that accepts", "i = 0; i < Dagoba.T.length; i++) # OPT: binary", "time-optimal for long lists, but adds a little complexity and", "OPT: binary search if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority:", "Dagoba.T = [] # transformers (more than meets the eye)", "high priority } \"\"\" \"\"\" Dagoba.extend = function(list, defaults) {", "Dagoba.error('Invalid transformer function') for(var i = 0; i < Dagoba.T.length;", "query transformer is a function that accepts a program and", "up short lists. 
\"\"\" class Transformer: def __init__(self): self.T =", "return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] != 'undefined') return acc", "= [] def transform(self, program): return program \"\"\" Dagoba.T =", "and walk the list linearly to add a new one.", "little complexity and doesn’t really speed up short lists. \"\"\"", "31 . We’ll assume there won’t be an enormous number", "the eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof", "return step return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100) #", "100) # these need to run early, so they get", "method catchall in js Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0]", "\"\"\" Dagoba.T = [] # transformers (more than meets the", "}, program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults)", "Dagoba.extend(step[1], defaults)] }) }, 100) # these need to run", "placed closer to the front of the list. We’re ensuring", "front of the list. We’re ensuring is a function, because", "doesn’t really speed up short lists. \"\"\" class Transformer: def", "the list linearly to add a new one. We’ll leave", "for the alias Dagoba.addPipetype(newname, function() {}) # because there's no", "|| [] # default arguments for the alias Dagoba.addPipetype(newname, function()", "(more than meets the eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun,", "we’re going to evaluate it later 31 . We’ll assume", "Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc)", "{ if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1], defaults)]", "than meets the eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority)", "= 0; i < Dagoba.T.length; i++) # OPT: binary search", "more time-optimal for long lists, but adds a little complexity", "in js Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0] != newname)", "the list. We’re ensuring is a function, because we’re going", "program.map(function(step) { if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1],", "if(typeof list[key] != 'undefined') return acc acc[key] = defaults[key] return", "a program, plus a priority level. Higher priority transformers are", "function(newname, oldname, defaults) { defaults = defaults || [] #", "}, 100) # these need to run early, so they", "for(var i = 0; i < Dagoba.T.length; i++) # OPT:", "complexity and doesn’t really speed up short lists. \"\"\" class", "a program and returns a program, plus a priority level.", "level. Higher priority transformers are placed closer to the front", "really speed up short lists. \"\"\" class Transformer: def __init__(self):", "'function') return Dagoba.error('Invalid transformer function') for(var i = 0; i", "# these need to run early, so they get a", "def transform(self, program): return program \"\"\" Dagoba.T = [] #", "function that accepts a program and returns a program, plus", "to evaluate it later 31 . 
We’ll assume there won’t", "of transformer additions, and walk the list linearly to add", "__init__(self): self.T = [] def transform(self, program): return program \"\"\"", "accepts a program and returns a program, plus a priority", "{priority: priority, fun: fun}) } \"\"\" \"\"\" Dagoba.transform = function(program)", "return Dagoba.error('Invalid transformer function') for(var i = 0; i <", "binary search if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority: priority,", "function') for(var i = 0; i < Dagoba.T.length; i++) #", "so they get a high priority } \"\"\" \"\"\" Dagoba.extend", "to the front of the list. We’re ensuring is a", "alias Dagoba.addPipetype(newname, function() {}) # because there's no method catchall", "transformer) { return transformer.fun(acc) }, program) } \"\"\" \"\"\" Dagoba.addAlias", "oldname, defaults) { defaults = defaults || [] # default", "return acc acc[key] = defaults[key] return acc }, list) }", "if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1], defaults)] })", "{ return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) }, program) }", "= function(fun, priority) { if(typeof fun != 'function') return Dagoba.error('Invalid", "won’t be an enormous number of transformer additions, and walk", "i++) # OPT: binary search if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i,", "enormous number of transformer additions, and walk the list linearly", "function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] !=", "is much more time-optimal for long lists, but adds a", "transformer.fun(acc) }, program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname,", "js Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0] != newname) return", "to run early, so they get a high priority }", "'undefined') return acc acc[key] = defaults[key] return acc }, list)", "leave a note in case this assumption turns out to", "\"\"\" A query transformer is a function that accepts a", "walk the list linearly to add a new one. We’ll", "return transformer.fun(acc) }, program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname,", "priority } \"\"\" \"\"\" Dagoba.extend = function(list, defaults) { return", "search if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority: priority, fun:", "one. We’ll leave a note in case this assumption turns", "Dagoba.T.length; i++) # OPT: binary search if(priority > Dagoba.T[i].priority) break", "plus a priority level. Higher priority transformers are placed closer", "number of transformer additions, and walk the list linearly to", "priority) { if(typeof fun != 'function') return Dagoba.error('Invalid transformer function')", "} \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults", "arguments for the alias Dagoba.addPipetype(newname, function() {}) # because there's", "a priority level. Higher priority transformers are placed closer to", "an enormous number of transformer additions, and walk the list", "in case this assumption turns out to be false —", "We’re ensuring is a function, because we’re going to evaluate", "program \"\"\" Dagoba.T = [] # transformers (more than meets", "to add a new one. We’ll leave a note in", "short lists. \"\"\" class Transformer: def __init__(self): self.T = []", "list. 
We’re ensuring is a function, because we’re going to", "\"\"\" \"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) {", "assumption turns out to be false — a binary search", "there's no method catchall in js Dagoba.addTransformer(function(program) { return program.map(function(step)", "run early, so they get a high priority } \"\"\"", "0, {priority: priority, fun: fun}) } \"\"\" \"\"\" Dagoba.transform =", "note in case this assumption turns out to be false", "Transformer: def __init__(self): self.T = [] def transform(self, program): return", "\"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof fun != 'function')", "return program.map(function(step) { if(step[0] != newname) return step return [oldname,", "} \"\"\" \"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer)", "Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority: priority, fun: fun}) } \"\"\"", "ensuring is a function, because we’re going to evaluate it", "there won’t be an enormous number of transformer additions, and", "step return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100) # these", "return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100) # these need", "priority level. Higher priority transformers are placed closer to the", "if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority: priority, fun: fun})", "speed up short lists. \"\"\" class Transformer: def __init__(self): self.T", "and returns a program, plus a priority level. Higher priority", "a function, because we’re going to evaluate it later 31", "i < Dagoba.T.length; i++) # OPT: binary search if(priority >", "that accepts a program and returns a program, plus a", "of the list. We’re ensuring is a function, because we’re", "a new one. We’ll leave a note in case this", "eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof fun", "the front of the list. We’re ensuring is a function,", "lists, but adds a little complexity and doesn’t really speed", "are placed closer to the front of the list. We’re", "[] def transform(self, program): return program \"\"\" Dagoba.T = []", "[] # default arguments for the alias Dagoba.addPipetype(newname, function() {})", "fun != 'function') return Dagoba.error('Invalid transformer function') for(var i =", "{}) # because there's no method catchall in js Dagoba.addTransformer(function(program)" ]
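# Illustrative sketch (not part of the original module): one way the quoted
# JavaScript registry could be carried over to the Python side.  Method names
# mirror the JS functions; nothing here is used by Dagoba itself.
class PrioritisedTransformer:
    def __init__(self):
        self.T = []  # (priority, fun) pairs, highest priority first

    def add_transformer(self, fun, priority):
        if not callable(fun):
            raise ValueError('Invalid transformer function')
        index = len(self.T)  # OPT: binary search
        for i, (existing_priority, _) in enumerate(self.T):
            if priority > existing_priority:
                index = i
                break
        self.T.insert(index, (priority, fun))

    def transform(self, program):
        # fold the program through every registered transformer, in priority order
        for _, fun in self.T:
            program = fun(program)
        return program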
[ "110 POLLING = 120 CALLBACK = 130 SUCCESS = 200", "300 # 任务重试 FAILURE = 400 # 任务执行失败 REVOKED =", "# 任务重试 FAILURE = 400 # 任务执行失败 REVOKED = 500", "2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING = 0 #", "\"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED =", "-*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\" class", "100 # 任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK", "RETRY = 300 # 任务重试 FAILURE = 400 # 任务执行失败", "200 # 任务执行成功 RETRY = 300 # 任务重试 FAILURE =", "CALLBACK = 130 SUCCESS = 200 # 任务执行成功 RETRY =", "= 100 # 任务执行开始 PROCESS = 110 POLLING = 120", "= 120 CALLBACK = 130 SUCCESS = 200 # 任务执行成功", "= 110 POLLING = 120 CALLBACK = 130 SUCCESS =", "<filename>yzcore/templates/project_template/src/const/_job.py #!/usr/bin/python3.6.8+ # -*- coding:utf-8 -*- \"\"\" @auth: cml @date:", "class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED = 100", "# 任务执行成功 RETRY = 300 # 任务重试 FAILURE = 400", "#!/usr/bin/python3.6.8+ # -*- coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2", "# 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS = 110", "... \"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED", "任务重试 FAILURE = 400 # 任务执行失败 REVOKED = 500 #", "cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING =", "# 任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK =", "= 130 SUCCESS = 200 # 任务执行成功 RETRY = 300", "130 SUCCESS = 200 # 任务执行成功 RETRY = 300 #", "-*- coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ...", "@date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING = 0", "= 300 # 任务重试 FAILURE = 400 # 任务执行失败 REVOKED", "任务等待执行 STARTED = 100 # 任务执行开始 PROCESS = 110 POLLING", "120 CALLBACK = 130 SUCCESS = 200 # 任务执行成功 RETRY", "任务执行成功 RETRY = 300 # 任务重试 FAILURE = 400 #", "STARTED = 100 # 任务执行开始 PROCESS = 110 POLLING =", "任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK = 130", "POLLING = 120 CALLBACK = 130 SUCCESS = 200 #", "coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\"", "SUCCESS = 200 # 任务执行成功 RETRY = 300 # 任务重试", "@auth: cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING", "= 0 # 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS", "JobStatus(object): PENDING = 0 # 任务等待执行 STARTED = 100 #", "PENDING = 0 # 任务等待执行 STARTED = 100 # 任务执行开始", "# -*- coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc:", "0 # 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS =", "PROCESS = 110 POLLING = 120 CALLBACK = 130 SUCCESS", "= 200 # 任务执行成功 RETRY = 300 # 任务重试 FAILURE", "\"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object):", "FAILURE = 400 # 任务执行失败 REVOKED = 500 # 任务撤销", "@desc: ... \"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行" ]
[ "carnê. \"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos em px", "para modelo de página inteira :param boletoDados: Objeto com os", "= 27 self.fontSizeValue = 12 self.title = 'Boleto bancário' self.fileDescr", ":type boletoDados2: :class:`pyboleto.data.BoletoData` \"\"\" raise NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados):", "= string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = ''", "= img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente", "= [] for digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit)) return ''.join(result)", "de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data", "+= '<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] = '' for linha_sacado in", "'w'], ['w', 'n', 'n', 'w', 'n'], ['n', 'w', 'n', 'w',", "Responsável por fazer o output do boleto em html. :copyright:", "digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit)) return ''.join(result) def _grouper(self, n,", "Cabeçalho tpl_data['logo_img'] = '' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux", "'n'], ] class BoletoHTML(object): \"\"\"Geração do Boleto em HTML Esta", "tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html): if", "''.join(result) def _grouper(self, n, iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x') -->", "txt = nfloat txt = txt.replace('.', ',') else: txt =", "width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def _load_template(self, template): pyboleto_dir = os.path.dirname(os.path.abspath(__file__))", "instrucao in boletoDados.instrucoes: tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] =", "self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as fd: fd.write(self.html) def _formataValorParaExibir(self,", "self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr = map(lambda x: x", "self.width = 750 self.widthCanhoto = 0 self.fontSizeTitle = 9 self.heightLine", "open(self.fileDescr, 'w') as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat:", "ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto", "boletoDados1, boletoDados2=None): \"\"\"Imprime um boleto tipo carnê com 2 boletos", "def drawCanhoto(self, html): if html: self.html += str(html) def printPage(self):", "= os.path.join(pyboleto_dir, 'templates', template) with open(template_path, 'r') as tpl: template_content", "= boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] =", "ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData`", "2 boletos por página. 
:param boletoDados1: Objeto com os dados", "class=\"pagina\">' def save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html", "save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html += '</div></body></html>'", "a mesma interface, para fazer output em LaTeX, etc ...", "nfloat): if nfloat: txt = nfloat txt = txt.replace('.', ',')", "por fazer o output do boleto em html. :copyright: ©", "'n', 'w', 'w', 'n'], ['w', 'n', 'n', 'n', 'w'], ['n',", "_load_template(self, template): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) template_path = os.path.join(pyboleto_dir, 'templates', template)", "= nfloat txt = txt.replace('.', ',') else: txt = \"\"", "x: x + ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n", "self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />' def", "= boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] =", "string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = '' if", "LaTeX, etc ... Esta classe pode imprimir boletos em formato", "data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento']", "o output do boleto em html. :copyright: © 2012 by", "return template_content def _load_image(self, logo_image): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) image_path =", "preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\"", "tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] =", "e constroi o arquivo\"\"\" self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'):", "= boletoDados.linha_digitavel # Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y')", ":class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto com os dados do boleto a", "_drawHorizontalCorteLine(self): self.html += '<hr />' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o", "\"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força início de", "'w', 'n', 'n', 'w'], ['w', 'w', 'n', 'n', 'n'], ['n',", "'w', 'n', 'w', 'n'], ] class BoletoHTML(object): \"\"\"Geração do Boleto", "file_descr, landscape=False): # Tamanhos em px self.width = 750 self.widthCanhoto", "template_content = tpl.read() return template_content def _load_image(self, logo_image): pyboleto_dir =", "Esta classe pode imprimir boletos em formato de carnê (2", "import izip_longest as zip_longest zip_longest # chamando para evitar erro", "modelo de página inteira :param boletoDados: Objeto com os dados", "with open(template_path, 'r') as tpl: template_content = tpl.read() return template_content", "o Recibo do Sacado para modelo de página inteira :param", "--> ABC DEF Gxx\"\"\" args = [iter(iterable)] * n return", "ser implementadas no futuro com a mesma interface, para fazer", "em px self.width = 750 self.widthCanhoto = 0 self.fontSizeTitle =", "['w', 'w', 'n', 'n', 'n'], ['n', 'n', 'w', 'n', 'w'],", "'n', 'n', 'w', 'n'], ['n', 'w', 'n', 'w', 'n'], ]", "Deve ser 
subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl", "# chamando para evitar erro de nao uso do zip_longest", "tpl_data['quantidade'] = boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc", "com 2 boletos por página. :param boletoDados1: Objeto com os", "'x') --> ABC DEF Gxx\"\"\" args = [iter(iterable)] * n", "'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w',", "ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl =", "boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y')", "details. \"\"\" import os import string import sys import codecs", "BSD, see LICENSE for more details. \"\"\" import os import", "tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] =", "drawCanhoto(self, html): if html: self.html += str(html) def printPage(self): self.html", "'w', 'n', 'n', 'n'], ['n', 'n', 'w', 'n', 'w'], ['w',", "Esta classe é responsável por imprimir o boleto em HTML.", "tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento']", "boletoDados: Objeto com os dados do boleto a ser preenchido.", "= data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] =", "boletoDados): \"\"\"Imprime o Recibo do Caixa :param boletoDados: Objeto com", "= boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] =", "página. :param boletoDados1: Objeto com os dados do boleto a", "font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def _load_template(self, template): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) template_path", "in digits: result.append('<span class=\"{0}\"></span>'.format(digit)) return ''.join(result) def _grouper(self, n, iterable,", "= txt.replace('.', ',') else: txt = \"\" return txt def", "tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] =", "tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def _load_template(self, template): pyboleto_dir =", "arquivo com várias páginas, uma por boleto. :param boletoDados: Objeto", "tipo carnê. 
\"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos em", "sys import codecs import base64 from itertools import chain if", "as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat: txt =", "-*- \"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por fazer o output", "['w', 'n', 'w', 'n', 'n'], ['n', 'w', 'w', 'n', 'n'],", "pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir, 'media', logo_image) return image_path", "uso do zip_longest else: from itertools import zip_longest DIGITS =", "NotImplementedError('Em desenvolvimento...') else: tpl = string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width,", "' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n']) result", "else: from itertools import zip_longest DIGITS = [ ['n', 'n',", ":class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data =", "\"\"\"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\" args = [iter(iterable)]", "= boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] =", "file_descr if landscape: raise NotImplementedError('Em desenvolvimento...') else: tpl = string.Template(self._load_template('head.html'))", "Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto com os", "para criar um arquivo com várias páginas, uma por boleto.", "# Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value", "no futuro com a mesma interface, para fazer output em", "com várias páginas, uma por boleto. :param boletoDados: Objeto com", "img.read() aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64", "valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Demonstrativo tpl_data['demonstrativo'] =", "Demonstrativo tpl_data['demonstrativo'] = '' for dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] +=", "['n', 'n s', 'n', 'n s'] if len(code) % 2", "desenvolvimento') def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto Convencional Você pode chamar", "pode imprimir boletos em formato de carnê (2 boletos por", "a ser preenchido. 
Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2:", "tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento']", "codecs import base64 from itertools import chain if sys.version_info <", "% 2 != 0: code = '0' + code for", "drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None): \"\"\"Imprime um boleto tipo carnê com 2", "'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n', 'n',", "tpl = string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle)", "com a mesma interface, para fazer output em LaTeX, etc", "height_line=self.heightLine, font_size_title=self.fontSizeTitle) def _load_template(self, template): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) template_path =", "self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Instruções tpl_data['instrucoes'] = '' for", "boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0]", "para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits = ['n', 'n s', 'n',", "classe pode imprimir boletos em formato de carnê (2 boletos", "+ ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n'])", "] class BoletoHTML(object): \"\"\"Geração do Boleto em HTML Esta classe", "base64 from itertools import chain if sys.version_info < (3,): from", "interface, para fazer output em LaTeX, etc ... 
Esta classe", "boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco", "= os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir, 'media', logo_image) return image_path def", "s'] if len(code) % 2 != 0: code = '0'", "\"\"\"Força início de nova página\"\"\" self.html += '</div><div class=\"pagina\">' def", "\"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por fazer o output do", "chamar este método diversas vezes para criar um arquivo com", "map(lambda x: x + ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w',", "Caixa :param boletoDados: Objeto com os dados do boleto a", "if landscape: raise NotImplementedError('Em desenvolvimento...') else: tpl = string.Template(self._load_template('head.html')) self.html", "n, iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\"", "\"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img']", "itertools import zip_longest DIGITS = [ ['n', 'n', 'w', 'w',", "= map(lambda x: x + ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr)))", "aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco']", "fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat: txt = nfloat", "de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self,", "code = '0' + code for digt1, digt2 in self._grouper(2,", "a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados:", "podem ser implementadas no futuro com a mesma interface, para", "return txt def _codigoBarraI25(self, code): \"\"\"Imprime Código de barras otimizado", "else: txt = \"\" return txt def _codigoBarraI25(self, code): \"\"\"Imprime", "self.title = 'Boleto bancário' self.fileDescr = file_descr if landscape: raise", ":param boletoDados1: Objeto com os dados do boleto a ser", "Você pode chamar este método diversas vezes para criar um", "utf-8 -*- \"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por fazer o", "= data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] =", "open(template_path, 'r') as tpl: template_content = tpl.read() return template_content def", "tpl.substitute(tpl_data) def drawCanhoto(self, html): if html: self.html += str(html) def", "template): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) template_path = os.path.join(pyboleto_dir, 'templates', template) with", "_drawReciboCaixa(self, boletoDados): \"\"\"Imprime o Recibo do Caixa :param boletoDados: Objeto", "image_path = os.path.join(pyboleto_dir, 'media', logo_image) return image_path def _drawReciboSacado(self, boletoDados):", "boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento", "boletoDados.instrucoes: tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] = '' for", "self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força início de nova página\"\"\" self.html +=", "data_vencimento.strftime('%d/%m/%Y') 
tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento", "_codigoBarraI25(self, code): \"\"\"Imprime Código de barras otimizado para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5", "'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n',", "self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html): if html: self.html", "self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc", "várias páginas, uma por boleto. :param boletoDados: Objeto com os", "for digt1, digt2 in self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr", "classe é responsável por imprimir o boleto em HTML. Outras", "= 'Boleto bancário' self.fileDescr = file_descr if landscape: raise NotImplementedError('Em", "'w', 'w', 'n'], ['w', 'n', 'n', 'n', 'w'], ['n', 'w',", "<gh_stars>0 # -*- coding: utf-8 -*- \"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe", "= data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] =", "= self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Demonstrativo tpl_data['demonstrativo'] = ''", "txt def _codigoBarraI25(self, code): \"\"\"Imprime Código de barras otimizado para", "= self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html): if html:", "= boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] =", "valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Instruções tpl_data['instrucoes'] =", "digt2_repr))) digits.extend(['w', 'n s', 'n']) result = [] for digit", "subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto com os dados do", ":type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {}", "class BoletoHTML(object): \"\"\"Geração do Boleto em HTML Esta classe é", "tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite']", "Sacado para modelo de página inteira :param boletoDados: Objeto com", "Tamanhos em px self.width = 750 self.widthCanhoto = 0 self.fontSizeTitle", ":param boletoDados: Objeto com os dados do boleto a ser", ":type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData` \"\"\" raise NotImplementedError('Em desenvolvimento')", "tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento", "do Sacado para modelo de página inteira :param boletoDados: Objeto", "data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py if isinstance(boletoDados.local_pagamento, unicode):", "hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as fd: fd.write(self.html)", "s', 'n']) result = [] for digit in digits: result.append('<span", "= boletoDados.carteira 
tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor =", "\"\"\" import os import string import sys import codecs import", "tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] = '' for linha_sacado", "Convencional Você pode chamar este método diversas vezes para criar", "cheia. :param file_descr: Um arquivo ou *file-like* class. :param landscape:", "'n', 'n', 'w', 'w'], ['w', 'n', 'n', 'w', 'n'], ['n',", "12 self.title = 'Boleto bancário' self.fileDescr = file_descr if landscape:", "linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de barras", "raise NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto Convencional Você", "+= '<p>{0}</p>'.format(linha_sacado) # Código de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html", "<NAME> :license: BSD, see LICENSE for more details. \"\"\" import", "def _drawReciboSacado(self, boletoDados): \"\"\"Imprime o Recibo do Sacado para modelo", "data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc", "tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] =", "NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto Convencional Você pode", "= valor_doc # Demonstrativo tpl_data['demonstrativo'] = '' for dm in", "def nextPage(self): \"\"\"Força início de nova página\"\"\" self.html += '</div><div", "img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel']", "= '0' + code for digt1, digt2 in self._grouper(2, code):", "data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format']", "o boleto em HTML. Outras classes podem ser implementadas no", "carnê com 2 boletos por página. :param boletoDados1: Objeto com", "import base64 from itertools import chain if sys.version_info < (3,):", "do Boleto em HTML Esta classe é responsável por imprimir", "if sys.version_info < (3,): from itertools import izip_longest as zip_longest", "27 self.fontSizeValue = 12 self.title = 'Boleto bancário' self.fileDescr =", "'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel", "= 12 self.title = 'Boleto bancário' self.fileDescr = file_descr if", "criar um arquivo com várias páginas, uma por boleto. 
:param", "boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)", "nao uso do zip_longest else: from itertools import zip_longest DIGITS", "\"\"\"Imprime um boleto tipo carnê com 2 boletos por página.", "\"\"\"Imprime o Recibo do Sacado para modelo de página inteira", "tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process", "Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados)", "tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel #", "\"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos em px self.width", "tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html +=", "def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None): \"\"\"Imprime um boleto tipo carnê com", "if nfloat: txt = nfloat txt = txt.replace('.', ',') else:", "tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode)", "if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] =", "um boleto tipo carnê com 2 boletos por página. :param", "HTML. Outras classes podem ser implementadas no futuro com a", "de barras otimizado para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits = ['n',", "Recibo do Sacado para modelo de página inteira :param boletoDados:", ":class:`pyboleto.data.BoletoData` \"\"\" raise NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto", "= codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux) img_base64 =", "= self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] =", "digits.extend(['w', 'n s', 'n']) result = [] for digit in", "\"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img']", "data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento']", "boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param", "formato de carnê (2 boletos por página) ou em formato", "-*- coding: utf-8 -*- \"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por", "self.html += str(html) def printPage(self): self.html += '<script>window.print();</script>' def drawBoletoCarneDuplo(self,", "= boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Demonstrativo", "barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html):", "boletoDados): \"\"\"Imprime o Recibo do Sacado para modelo de página", "'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n', 'n', 'w',", "dados do boleto a ser preenchido. 
Deve ser subclasse de", "img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo data_vencimento", "txt = txt.replace('.', ',') else: txt = \"\" return txt", "= valor_doc # Instruções tpl_data['instrucoes'] = '' for instrucao in", "# Rodapé tpl_data['sacado_info'] = '' for linha_sacado in boletoDados.sacado: tpl_data['sacado_info']", "= 0 self.fontSizeTitle = 9 self.heightLine = 27 self.fontSizeValue =", "'n', 'w', 'n', 'n'], ['n', 'w', 'w', 'n', 'n'], ['n',", "output do boleto em html. :copyright: © 2012 by <NAME>", "'n', 'n', 'n', 'w'], ['n', 'w', 'n', 'n', 'w'], ['w',", ":class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho", "digt2 in self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr = map(lambda", "arquivo\"\"\" self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with", "'n', 'n'], ['n', 'n', 'w', 'n', 'w'], ['w', 'n', 'w',", "digt1, digt2 in self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr =", "def _drawHorizontalCorteLine(self): self.html += '<hr />' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime", "boletos em formato de carnê (2 boletos por página) ou", "'w', 'w'], ['w', 'n', 'n', 'w', 'n'], ['n', 'w', 'n',", "de carnê (2 boletos por página) ou em formato de", "# Instruções tpl_data['instrucoes'] = '' for instrucao in boletoDados.instrucoes: tpl_data['instrucoes']", "= img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo", "= 750 self.widthCanhoto = 0 self.fontSizeTitle = 9 self.heightLine =", "+= '</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w')", "'0' + code for digt1, digt2 in self._grouper(2, code): digt1_repr", "uma por boleto. :param boletoDados: Objeto com os dados do", "boleto em HTML. Outras classes podem ser implementadas no futuro", "boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento = boletoDados.data_documento", "Formato da folha. Usar ``True`` para boleto tipo carnê. \"\"\"", "see LICENSE for more details. \"\"\" import os import string", "dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def", "if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as fd:", "len(code) % 2 != 0: code = '0' + code", "self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Demonstrativo tpl_data['demonstrativo'] = '' for", "nfloat: txt = nfloat txt = txt.replace('.', ',') else: txt", "else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente", "'Boleto bancário' self.fileDescr = file_descr if landscape: raise NotImplementedError('Em desenvolvimento...')", "ser preenchido. 
Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData`", "img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco #", "(3,): from itertools import izip_longest as zip_longest zip_longest # chamando", "coding: utf-8 -*- \"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por fazer", "'n'], ['n', 'n', 'w', 'n', 'w'], ['w', 'n', 'w', 'n',", "codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux)", "boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)", "< (3,): from itertools import izip_longest as zip_longest zip_longest #", "subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados)", "= '' for dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html", "'w', 'n', 'n'], ['n', 'w', 'w', 'n', 'n'], ['n', 'n',", "boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor", "= data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] =", "pode chamar este método diversas vezes para criar um arquivo", "os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir, 'media', logo_image) return image_path def _drawReciboSacado(self,", "boletoDados2: Objeto com os dados do boleto a ser preenchido.", "'media', logo_image) return image_path def _drawReciboSacado(self, boletoDados): \"\"\"Imprime o Recibo", "unicode em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8')", "nfloat txt = txt.replace('.', ',') else: txt = \"\" return", "data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc", "img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux) img_base64", "tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento", "preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto com", "preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type", "etc ... Esta classe pode imprimir boletos em formato de", "self.html += '<hr />' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o Recibo", "Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento", "data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite", "em LaTeX, etc ... 
Esta classe pode imprimir boletos em", "fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\" args =", "tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade']", "vezes para criar um arquivo com várias páginas, uma por", "boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type", "boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento", "boleto e constroi o arquivo\"\"\" self.html += '</div></body></html>' if hasattr(self.fileDescr,", "unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente']", "= boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento =", "'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\" args = [iter(iterable)] *", "'n']) result = [] for digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit))", "+= '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr", "'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente']", "+= '<script>window.print();</script>' def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None): \"\"\"Imprime um boleto tipo", "2012 by <NAME> :license: BSD, see LICENSE for more details.", "boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento", "[] for digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit)) return ''.join(result) def", "boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y')", "string import sys import codecs import base64 from itertools import", "+= str(html) def printPage(self): self.html += '<script>window.print();</script>' def drawBoletoCarneDuplo(self, boletoDados1,", "data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value em unicode", "'n', 'n', 'n'], ['n', 'n', 'w', 'n', 'w'], ['w', 'n',", "tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py if", "'' for dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html +=", "if html: self.html += str(html) def printPage(self): self.html += '<script>window.print();</script>'", "Objeto com os dados do boleto a ser preenchido. 
Deve", ":copyright: © 2012 by <NAME> :license: BSD, see LICENSE for", "'n', 'w'], ['w', 'w', 'n', 'n', 'n'], ['n', 'n', 'w',", "= 9 self.heightLine = 27 self.fontSizeValue = 12 self.title =", "# Código de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data)", "value em unicode em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] =", "'</div><div class=\"pagina\">' def save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\"", "em formato de carnê (2 boletos por página) ou em", "os import string import sys import codecs import base64 from", "class=\"{0}\"></span>'.format(digit)) return ''.join(result) def _grouper(self, n, iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG',", "itertools import izip_longest as zip_longest zip_longest # chamando para evitar", "self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força início de nova página\"\"\" self.html", "fazer output em LaTeX, etc ... Esta classe pode imprimir", "= file_descr if landscape: raise NotImplementedError('Em desenvolvimento...') else: tpl =", "da folha. Usar ``True`` para boleto tipo carnê. \"\"\" def", "ou em formato de folha cheia. :param file_descr: Um arquivo", "0: code = '0' + code for digt1, digt2 in", "zip_longest DIGITS = [ ['n', 'n', 'w', 'w', 'n'], ['w',", "for linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de", "valor_doc # Demonstrativo tpl_data['demonstrativo'] = '' for dm in boletoDados.demonstrativo:", "= ['n', 'n s', 'n', 'n s'] if len(code) %", "tpl_data['sacado_info'] = '' for linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado)", "boleto tipo carnê com 2 boletos por página. :param boletoDados1:", "tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor", "digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n']) result = [] for", "def _formataValorParaExibir(self, nfloat): if nfloat: txt = nfloat txt =", "from itertools import izip_longest as zip_longest zip_longest # chamando para", "txt.replace('.', ',') else: txt = \"\" return txt def _codigoBarraI25(self,", "+= '<hr />' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o Recibo do", "boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')", "boletos por página) ou em formato de folha cheia. 
:param", "['w', 'n', 'n', 'w', 'n'], ['n', 'w', 'n', 'w', 'n'],", "as zip_longest zip_longest # chamando para evitar erro de nao", "base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco", "/>' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o Recibo do Caixa :param", "implementadas no futuro com a mesma interface, para fazer output", "tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format']", "em unicode em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode", "# Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] =", "def save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html +=", "'templates', template) with open(template_path, 'r') as tpl: template_content = tpl.read()", "in boletoDados.instrucoes: tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] = ''", "= '' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read()", "'n', 'w', 'n'], ] class BoletoHTML(object): \"\"\"Geração do Boleto em", "boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} #", "str(html) def printPage(self): self.html += '<script>window.print();</script>' def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None):", "for more details. \"\"\" import os import string import sys", "tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado']", "barras otimizado para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits = ['n', 'n", "digits = ['n', 'n s', 'n', 'n s'] if len(code)", "boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux)", "em HTML. Outras classes podem ser implementadas no futuro com", "aux = img.read() aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img']", "ABC DEF Gxx\"\"\" args = [iter(iterable)] * n return zip_longest(fillvalue=fillvalue,", "DIGITS[int(digt1)] digt2_repr = map(lambda x: x + ' s', DIGITS[int(digt2)])", "'' for linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código", "'<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />'", "os.path.dirname(os.path.abspath(__file__)) template_path = os.path.join(pyboleto_dir, 'templates', template) with open(template_path, 'r') as", "tpl_data['logo_img'] = '' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux =", "``True`` para boleto tipo carnê. \"\"\" def __init__(self, file_descr, landscape=False):", ":param file_descr: Um arquivo ou *file-like* class. 
:param landscape: Formato", "'n'], ['w', 'n', 'n', 'n', 'w'], ['n', 'w', 'n', 'n',", "['n', 'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'],", "tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />' def _drawReciboCaixa(self, boletoDados):", "este método diversas vezes para criar um arquivo com várias", "boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')", "chain if sys.version_info < (3,): from itertools import izip_longest as", "self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html): if html: self.html +=", "de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data", "'' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux", "in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de barras tpl_data['barcode']", "'n', 'w', 'n'], ['n', 'w', 'n', 'w', 'n'], ] class", "code): \"\"\"Imprime Código de barras otimizado para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\"", "tpl_data['valor_documento'] = valor_doc # Demonstrativo tpl_data['demonstrativo'] = '' for dm", "em html. :copyright: © 2012 by <NAME> :license: BSD, see", "self.heightLine = 27 self.fontSizeValue = 12 self.title = 'Boleto bancário'", "self.widthCanhoto = 0 self.fontSizeTitle = 9 self.heightLine = 27 self.fontSizeValue", "em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else:", "diversas vezes para criar um arquivo com várias páginas, uma", "more details. \"\"\" import os import string import sys import", "= boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process =", "= os.path.dirname(os.path.abspath(__file__)) template_path = os.path.join(pyboleto_dir, 'templates', template) with open(template_path, 'r')", "s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n']) result =", "html: self.html += str(html) def printPage(self): self.html += '<script>window.print();</script>' def", "bancário' self.fileDescr = file_descr if landscape: raise NotImplementedError('Em desenvolvimento...') else:", "Recibo do Caixa :param boletoDados: Objeto com os dados do", "Um arquivo ou *file-like* class. :param landscape: Formato da folha.", "self.fileDescr = file_descr if landscape: raise NotImplementedError('Em desenvolvimento...') else: tpl", "folha. Usar ``True`` para boleto tipo carnê. 
\"\"\" def __init__(self,", ":class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData` \"\"\" raise NotImplementedError('Em desenvolvimento') def drawBoleto(self,", "= tpl.read() return template_content def _load_image(self, logo_image): pyboleto_dir = os.path.dirname(os.path.abspath(__file__))", "tpl_data = {} # Cabeçalho tpl_data['logo_img'] = '' if boletoDados.logo_image:", "Código de barras otimizado para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits =", "sys.version_info < (3,): from itertools import izip_longest as zip_longest zip_longest", "otimizado para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits = ['n', 'n s',", "mesma interface, para fazer output em LaTeX, etc ... Esta", "import zip_longest DIGITS = [ ['n', 'n', 'w', 'w', 'n'],", "self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força início de nova página\"\"\"", "= os.path.join(pyboleto_dir, 'media', logo_image) return image_path def _drawReciboSacado(self, boletoDados): \"\"\"Imprime", "tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento']", "classes podem ser implementadas no futuro com a mesma interface,", "'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as fd: fd.write(self.html) def", "em HTML Esta classe é responsável por imprimir o boleto", "um arquivo com várias páginas, uma por boleto. :param boletoDados:", "return ''.join(result) def _grouper(self, n, iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x')", "de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData` \"\"\" raise", "folha cheia. :param file_descr: Um arquivo ou *file-like* class. :param", "\"\"\" digits = ['n', 'n s', 'n', 'n s'] if", "['n', 'n', 'w', 'w', 'n'], ['w', 'n', 'n', 'n', 'w'],", "string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def _load_template(self,", ":type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {}", "erro de nao uso do zip_longest else: from itertools import", "= string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def", "+= '</div><div class=\"pagina\">' def save(self): \"\"\"Fecha boleto e constroi o", "_drawReciboSacado(self, boletoDados): \"\"\"Imprime o Recibo do Sacado para modelo de", "if len(code) % 2 != 0: code = '0' +", "fazer o output do boleto em html. 
:copyright: © 2012", "Outras classes podem ser implementadas no futuro com a mesma", "ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto com os dados", "def _grouper(self, n, iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x') --> ABC", ":class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho", "self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força início de nova", "ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine()", "'</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as", "zip_longest else: from itertools import zip_longest DIGITS = [ ['n',", "http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits = ['n', 'n s', 'n', 'n s']", "'n', 'n', 'w'], ['n', 'w', 'n', 'n', 'w'], ['w', 'w',", "imprimir boletos em formato de carnê (2 boletos por página)", "font_size_title=self.fontSizeTitle) def _load_template(self, template): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) template_path = os.path.join(pyboleto_dir,", "tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento", "# value em unicode em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento']", "= DIGITS[int(digt1)] digt2_repr = map(lambda x: x + ' s',", "'w'], ['n', 'w', 'n', 'n', 'w'], ['w', 'w', 'n', 'n',", "template_content def _load_image(self, logo_image): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir,", "boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero()", "data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira']", "arquivo ou *file-like* class. :param landscape: Formato da folha. Usar", "self.html += '<script>window.print();</script>' def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None): \"\"\"Imprime um boleto", "boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento']", "',') else: txt = \"\" return txt def _codigoBarraI25(self, code):", "with open(self.fileDescr, 'w') as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if", "nova página\"\"\" self.html += '</div><div class=\"pagina\">' def save(self): \"\"\"Fecha boleto", "por imprimir o boleto em HTML. Outras classes podem ser", "boleto tipo carnê. 
\"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos", "logo_image): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir, 'media', logo_image) return", "# Demonstrativo tpl_data['demonstrativo'] = '' for dm in boletoDados.demonstrativo: tpl_data['demonstrativo']", "'<hr />' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o Recibo do Caixa", "boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira", "= data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py if isinstance(boletoDados.local_pagamento,", "'<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] = '' for linha_sacado in boletoDados.sacado:", "def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto Convencional Você pode chamar este", "{} # Cabeçalho tpl_data['logo_img'] = '' if boletoDados.logo_image: img =", ":class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def", "tipo carnê com 2 boletos por página. :param boletoDados1: Objeto", "de nao uso do zip_longest else: from itertools import zip_longest", "'n s', 'n']) result = [] for digit in digits:", "'w') as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat: txt", "do Caixa :param boletoDados: Objeto com os dados do boleto", "= boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente", "= boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente", "zip_longest # chamando para evitar erro de nao uso do", "chamando para evitar erro de nao uso do zip_longest else:", "'w'], ['w', 'n', 'w', 'n', 'n'], ['n', 'w', 'w', 'n',", "'n', 'n s'] if len(code) % 2 != 0: code", "zip_longest zip_longest # chamando para evitar erro de nao uso", "= [ ['n', 'n', 'w', 'w', 'n'], ['w', 'n', 'n',", "txt = \"\" return txt def _codigoBarraI25(self, code): \"\"\"Imprime Código", "boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} #", "para fazer output em LaTeX, etc ... Esta classe pode", "boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de barras tpl_data['barcode'] =", "boleto. :param boletoDados: Objeto com os dados do boleto a", "boletoDados): \"\"\"Imprime Boleto Convencional Você pode chamar este método diversas", "itertools import chain if sys.version_info < (3,): from itertools import", "= boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc =", "ser preenchido. 
Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData`", "\"\"\"Imprime Boleto Convencional Você pode chamar este método diversas vezes", "DEF Gxx\"\"\" args = [iter(iterable)] * n return zip_longest(fillvalue=fillvalue, *args)", "boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py", "Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2:", "self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr,", "'n'], ['n', 'w', 'n', 'w', 'n'], ] class BoletoHTML(object): \"\"\"Geração", "'n', 'w'], ['w', 'n', 'w', 'n', 'n'], ['n', 'w', 'w',", "px self.width = 750 self.widthCanhoto = 0 self.fontSizeTitle = 9", "boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força", "= string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = ''", "do zip_longest else: from itertools import zip_longest DIGITS = [", "os.path.join(pyboleto_dir, 'templates', template) with open(template_path, 'r') as tpl: template_content =", "por página. :param boletoDados1: Objeto com os dados do boleto", "from itertools import chain if sys.version_info < (3,): from itertools", "boletoDados.linha_digitavel # Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') #", "Boleto Convencional Você pode chamar este método diversas vezes para", "subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData` \"\"\"", "_formataValorParaExibir(self, nfloat): if nfloat: txt = nfloat txt = txt.replace('.',", ":class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data =", "tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] =", ":class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força início", "tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento']", "(2 boletos por página) ou em formato de folha cheia.", "\"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html += '</div></body></html>' if", "!= 0: code = '0' + code for digt1, digt2", "tpl_data['valor_documento'] = valor_doc # Instruções tpl_data['instrucoes'] = '' for instrucao", "DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n']) result = []", "página) ou em formato de folha cheia. 
# -*- coding: utf-8 -*-
"""
    pyboleto.html
    ~~~~~~~~~~~~~

    Class responsible for producing the boleto output in HTML.

    :copyright: © 2012 by <NAME>
    :license: BSD, see LICENSE for more details.

"""
import os
import string
import sys
import codecs
import base64
from itertools import chain

if sys.version_info < (3,):
    from itertools import izip_longest as zip_longest
    zip_longest  # referenced so the import is not flagged as unused
else:
    from itertools import zip_longest

# Interleaved 2 of 5 bar-width patterns for digits 0-9 (n=narrow, w=wide)
DIGITS = [
    ['n', 'n', 'w', 'w', 'n'],
    ['w', 'n', 'n', 'n', 'w'],
    ['n', 'w', 'n', 'n', 'w'],
    ['w', 'w', 'n', 'n', 'n'],
    ['n', 'n', 'w', 'n', 'w'],
    ['w', 'n', 'w', 'n', 'n'],
    ['n', 'w', 'w', 'n', 'n'],
    ['n', 'n', 'n', 'w', 'w'],
    ['w', 'n', 'n', 'w', 'n'],
    ['n', 'w', 'n', 'w', 'n'],
]


class BoletoHTML(object):
    """Boleto generation in HTML

    This class is responsible for rendering the boleto in HTML.
    Other classes with the same interface may be implemented in the
    future to produce output in LaTeX, etc.

    This class can print boletos in carnê format (2 boletos per page)
    or in full-page format.

    :param file_descr: A file or *file-like* object.
    :param landscape: Page format. Use ``True`` for carnê-style boletos.

    """
    def __init__(self, file_descr, landscape=False):
        # Sizes in px
        self.width = 750
        self.widthCanhoto = 0
        self.fontSizeTitle = 9
        self.heightLine = 27
        self.fontSizeValue = 12
        self.title = 'Boleto bancário'
        self.fileDescr = file_descr

        if landscape:
            raise NotImplementedError('Under development...')
        else:
            tpl = string.Template(self._load_template('head.html'))
            self.html = tpl.substitute(title=self.title, width=self.width,
                                       font_size_value=self.fontSizeValue,
                                       height_line=self.heightLine,
                                       font_size_title=self.fontSizeTitle)

    def _load_template(self, template):
        pyboleto_dir = os.path.dirname(os.path.abspath(__file__))
        template_path = os.path.join(pyboleto_dir, 'templates', template)
        with open(template_path, 'r') as tpl:
            template_content = tpl.read()
        return template_content

    def _load_image(self, logo_image):
        pyboleto_dir = os.path.dirname(os.path.abspath(__file__))
        image_path = os.path.join(pyboleto_dir, 'media', logo_image)
        return image_path

    def _drawReciboSacado(self, boletoDados):
        """Renders the payer receipt (Recibo do Sacado) for the
        full-page layout.

        :param boletoDados: Object holding the boleto data to be filled
            in. Must be a subclass of :class:`pyboleto.data.BoletoData`.
        :type boletoDados: :class:`pyboleto.data.BoletoData`

        """
        tpl = string.Template(self._load_template('recibo_sacado.html'))
        tpl_data = {}

        # Header
        tpl_data['logo_img'] = ''
        if boletoDados.logo_image:
            # Read in binary mode so base64.b64encode also works on Python 3
            img = codecs.open(self._load_image(boletoDados.logo_image), 'rb')
            aux = img.read()
            aux = base64.b64encode(aux).decode('ascii')
            img_base64 = 'data:image/jpeg;base64,{0}'.format(aux)
            tpl_data['logo_img'] = img_base64
        tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco

        # Body
        tpl_data['cedente'] = boletoDados.cedente
        tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente
        tpl_data['cedente_documento'] = boletoDados.cedente_documento
        data_vencimento = boletoDados.data_vencimento
        tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y')
        tpl_data['sacado'] = boletoDados.sacado[0]
        tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero()
        tpl_data['numero_documento'] = boletoDados.numero_documento
        data_documento = boletoDados.data_documento
        tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')
        tpl_data['cedente_endereco'] = boletoDados.cedente_endereco

        valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)
        tpl_data['valor_documento'] = valor_doc

        # Statement (demonstrativo)
        tpl_data['demonstrativo'] = ''
        for dm in boletoDados.demonstrativo:
            tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm)

        self.html += tpl.substitute(tpl_data)

    def _drawHorizontalCorteLine(self):
        self.html += '<hr />'

    def _drawReciboCaixa(self, boletoDados):
        """Renders the teller receipt (Recibo do Caixa).

        :param boletoDados: Object holding the boleto data to be filled
            in. Must be a subclass of :class:`pyboleto.data.BoletoData`.
        :type boletoDados: :class:`pyboleto.data.BoletoData`

        """
        tpl = string.Template(self._load_template('recibo_caixa.html'))
        tpl_data = {}

        # Header
        tpl_data['logo_img'] = ''
        if boletoDados.logo_image:
            # Read in binary mode so base64.b64encode also works on Python 3
            img = codecs.open(self._load_image(boletoDados.logo_image), 'rb')
            aux = img.read()
            aux = base64.b64encode(aux).decode('ascii')
            img_base64 = 'data:image/jpeg;base64,{0}'.format(aux)
            tpl_data['logo_img'] = img_base64
        tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco
        tpl_data['linha_digitavel'] = boletoDados.linha_digitavel

        # Body
        data_vencimento = boletoDados.data_vencimento
        tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y')

        # local_pagamento may be a unicode value on Python 2; the guard
        # short-circuits on Python 3, where the name `unicode` is undefined
        if sys.version_info < (3,) and \
                isinstance(boletoDados.local_pagamento, unicode):  # noqa: F821
            tpl_data['local_pagamento'] = \
                boletoDados.local_pagamento.encode('utf-8')
        else:
            tpl_data['local_pagamento'] = boletoDados.local_pagamento

        tpl_data['cedente'] = boletoDados.cedente
        tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente
        data_documento = boletoDados.data_documento
        tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')
        tpl_data['numero_documento'] = boletoDados.numero_documento
        tpl_data['especie_documento'] = boletoDados.especie_documento
        tpl_data['aceite'] = boletoDados.aceite
        data_process = boletoDados.data_processamento
        tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y')
        tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero()
        tpl_data['carteira'] = boletoDados.carteira
        tpl_data['especie'] = boletoDados.especie
        tpl_data['quantidade'] = boletoDados.quantidade
        valor = self._formataValorParaExibir(boletoDados.valor)
        tpl_data['valor'] = valor
        valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)
        tpl_data['valor_documento'] = valor_doc

        # Instructions
        tpl_data['instrucoes'] = ''
        for instrucao in boletoDados.instrucoes:
            tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao)

        # Footer
        tpl_data['sacado_info'] = ''
        for linha_sacado in boletoDados.sacado:
            tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado)

        # Barcode
        tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode)

        self.html += tpl.substitute(tpl_data)

    def drawCanhoto(self, html):
        if html:
            self.html += str(html)

    def printPage(self):
        self.html += '<script>window.print();</script>'

    def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None):
        """Prints a carnê-style sheet with 2 boletos per page.

        :param boletoDados1: Object holding the boleto data to be filled
            in. Must be a subclass of :class:`pyboleto.data.BoletoData`.
        :param boletoDados2: Object holding the boleto data to be filled
            in. Must be a subclass of :class:`pyboleto.data.BoletoData`.
        :type boletoDados1: :class:`pyboleto.data.BoletoData`
        :type boletoDados2: :class:`pyboleto.data.BoletoData`

        """
        raise NotImplementedError('Under development')

    def drawBoleto(self, boletoDados):
        """Prints a conventional boleto.

        You can call this method several times to create a file with
        multiple pages, one per boleto.

        :param boletoDados: Object holding the boleto data to be filled
            in. Must be a subclass of :class:`pyboleto.data.BoletoData`.
        :type boletoDados: :class:`pyboleto.data.BoletoData`

        """
        self._drawReciboSacado(boletoDados)
        self._drawHorizontalCorteLine()
        self._drawReciboCaixa(boletoDados)
        self._drawHorizontalCorteLine()

    def nextPage(self):
        """Forces the start of a new page."""
        self.html += '</div><div class="pagina">'

    def save(self):
        """Closes the boleto and builds the output file."""
        self.html += '</div></body></html>'
        if hasattr(self.fileDescr, 'write'):
            self.fileDescr.write(self.html)
        else:
            with open(self.fileDescr, 'w') as fd:
                fd.write(self.html)

    def _formataValorParaExibir(self, nfloat):
        # Display values with a decimal comma, Brazilian style
        if nfloat:
            txt = nfloat
            txt = txt.replace('.', ',')
        else:
            txt = ""
        return txt

    def _codigoBarraI25(self, code):
        """Renders the barcode optimized for boletos

        http://en.wikipedia.org/wiki/Interleaved_2_of_5
        """
        digits = ['n', 'n s', 'n', 'n s']  # start pattern

        if len(code) % 2 != 0:
            code = '0' + code

        # Interleave the bar widths of the first digit of each pair with
        # the space widths ("s" suffix) of the second digit
        for digt1, digt2 in self._grouper(2, code):
            digt1_repr = DIGITS[int(digt1)]
            digt2_repr = map(lambda x: x + ' s', DIGITS[int(digt2)])
            digits.extend(chain(*zip(digt1_repr, digt2_repr)))

        digits.extend(['w', 'n s', 'n'])  # stop pattern

        result = []
        for digit in digits:
            result.append('<span class="{0}"></span>'.format(digit))
        return ''.join(result)

    def _grouper(self, n, iterable, fillvalue=None):
        """grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"""
        args = [iter(iterable)] * n
        return zip_longest(fillvalue=fillvalue, *args)
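# For orientation, a minimal usage sketch of the class above. Assumption:
# `dados_list` holds pre-populated instances of a pyboleto.data.BoletoData
# subclass (no such subclass is defined here); only methods confirmed by
# the module itself are used.
from pyboleto.html import BoletoHTML

def render_boletos(dados_list, path='boletos.html'):
    doc = BoletoHTML(path)          # full-page layout (landscape=False)
    for i, dados in enumerate(dados_list):
        if i > 0:
            doc.nextPage()          # one boleto per page
        doc.drawBoleto(dados)       # payer receipt + teller receipt + barcode
    doc.save()                      # writes the assembled HTML to `path`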
# A small standalone example: running prefix sums printed from a while loop.
summary = 0
i = 0
while i < 5:
    summary = summary + i
    print(summary)
    i = i + 1
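# The same running totals (0, 1, 3, 6, 10) can be produced more
# idiomatically with itertools.accumulate; a sketch:
from itertools import accumulate

for summary in accumulate(range(5)):
    print(summary)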
# Example usage of the imtreat image-processing library: open an image,
# apply detail enhancement, and save the result.
import imtreat

img = imtreat.imageManagerClass.openImageFunction("../images/soleil.png", 0)
img = imtreat.definedModesClass.detailEnhanceFunction(img)
imtreat.imageManagerClass.saveImageFunction("/Téléchargements/", "image_1", ".png", img)
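# A defensive variant of the same pipeline. imtreat's API is known here
# only through the three calls above and its error behaviour is not
# documented, so this sketch only guards the input path with the stdlib.
import os
import imtreat

src = "../images/soleil.png"
if not os.path.isfile(src):
    raise FileNotFoundError(src)

img = imtreat.imageManagerClass.openImageFunction(src, 0)
img = imtreat.definedModesClass.detailEnhanceFunction(img)
imtreat.imageManagerClass.saveImageFunction("/Téléchargements/", "image_1", ".png", img)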
# Copyright (c) 2016 <NAME>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_config import cfg

hyperv_opt_group = cfg.OptGroup("hyperv",
    title='The Hyper-V feature',
    help="""
The hyperv feature allows you to configure the Hyper-V hypervisor
driver to be used within an OpenStack deployment.
""")

hyperv_opts = [
    cfg.FloatOpt('dynamic_memory_ratio',
        default=1.0,
        help="""
Dynamic memory ratio

Enables dynamic memory allocation (ballooning) when set to a value
greater than 1. The value expresses the ratio between the total RAM
assigned to an instance and its startup RAM amount. For example a
ratio of 2.0 for an instance with 1024MB of RAM implies 512MB of RAM
allocated at startup.

Possible values:

* 1.0: Disables dynamic memory allocation (Default).
* Float values greater than 1.0: Enables allocation of total implied
  RAM divided by this value for startup.
"""),
    cfg.BoolOpt('enable_instance_metrics_collection',
        default=False,
        help="""
Enable instance metrics collection

Enables metrics collections for an instance by using Hyper-V's
metric APIs. Collected data can be retrieved by other apps and
services, e.g. Ceilometer.
"""),
    cfg.StrOpt('instances_path_share',
        default="",
        help="""
Instances path share

The name of a Windows share mapped to the "instances_path" dir and
used by the resize feature to copy files to the target host. If left
blank, an administrative share (hidden network share) will be used,
looking for the same "instances_path" used locally.

Possible values:

* "": An administrative share will be used (Default).
* Name of a Windows share.

Related options:

* "instances_path": The directory which will be used if this option
  here is left blank.
"""),
    cfg.BoolOpt('limit_cpu_features',
        default=False,
        help="""
Limit CPU features

This flag is needed to support live migration to hosts with
different CPU features and checked during instance creation in order
to limit the CPU features used by the instance.
"""),
    cfg.IntOpt('mounted_disk_query_retry_count',
        default=10,
        min=0,
        help="""
Mounted disk query retry count

The number of times to retry checking for a mounted disk. The query
runs until the device can be found or the retry count is reached.

Possible values:

* Positive integer values. Values greater than 1 are recommended
  (Default: 10).

Related options:

* Time interval between disk mount retries is declared with the
  "mounted_disk_query_retry_interval" option.
"""),
    cfg.IntOpt('mounted_disk_query_retry_interval',
        default=5,
        min=0,
        help="""
Mounted disk query retry interval

Interval between checks for a mounted disk, in seconds.

Possible values:

* Time in seconds (Default: 5).

Related options:

* This option is meaningful when mounted_disk_query_retry_count is
  greater than 1.
* The retry loop runs with the mounted_disk_query_retry_count and
  mounted_disk_query_retry_interval configuration options.
"""),
    cfg.IntOpt('power_state_check_timeframe',
        default=60,
        min=0,
        help="""
Power state check timeframe

The timeframe to be checked for instance power state changes. This
option is used to fetch the state of the instance from Hyper-V
through the WMI interface, within the specified timeframe.

Possible values:

* Timeframe in seconds (Default: 60).
"""),
    cfg.IntOpt('power_state_event_polling_interval',
        default=2,
        min=0,
        help="""
Power state event polling interval

Instance power state change event polling frequency. Sets the
listener interval for power state events to the given value. This
option enhances the internal lifecycle notifications of instances
that reboot themselves. It is unlikely that an operator has to
change this value.

Possible values:

* Time in seconds (Default: 2).
"""),
    cfg.StrOpt('qemu_img_cmd',
        default="qemu-img.exe",
        help="""
qemu-img command

qemu-img is required for some of the image related operations like
converting between different image types. You can get it from here:
(http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack
Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/)
which automatically sets the proper path for this config option. You
can either give the full path of qemu-img.exe or set its path in the
PATH environment variable and leave this option to the default value.

Possible values:

* Name of the qemu-img executable, in case it is in the same
  directory as the nova-compute service or its path is in the PATH
  environment variable (Default).
* Path of the qemu-img command (DRIVELETTER:\PATH\TO\QEMU-IMG\COMMAND).

Related options:

* If the config_drive_cdrom option is False, qemu-img will be used to
  convert the ISO to a VHD, otherwise the config drive will remain an
  ISO. To use the config drive with Hyper-V, you must set the
  ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe``
  installation.
"""),
    cfg.StrOpt('vswitch_name',
        help="""
External virtual switch name

The Hyper-V Virtual Switch is a software-based layer-2 Ethernet
network switch that is available with the installation of the
Hyper-V server role. The switch includes programmatically managed
and extensible capabilities to connect virtual machines to both
virtual networks and the physical network. In addition, Hyper-V
Virtual Switch provides policy enforcement for security, isolation,
and service levels. The vSwitch represented by this config option
must be an external one (not internal or private).

Possible values:

* If not provided, the first of a list of available vswitches is
  used. This list is queried using WQL.
* Virtual switch name.
"""),
    cfg.IntOpt('wait_soft_reboot_seconds',
        default=60,
        min=0,
        help="""
Wait soft reboot seconds

Number of seconds to wait for an instance to shut down after a soft
reboot request is made. We fall back to hard reboot if the instance
does not shut down within this window.

Possible values:

* Time in seconds (Default: 60).
"""),
    cfg.BoolOpt('config_drive_cdrom',
        default=False,
        help="""
Mount config drive as a CD drive.

OpenStack can be configured to write instance metadata to a config
drive, which is then attached to the instance before it boots. The
config drive can be attached as a disk drive (default) or as a CD
drive.

Related options:

* This option is meaningful with the ``force_config_drive`` option
  set to ``True`` or when the REST API call to create an instance
  will have the ``--config-drive=True`` flag.
* The ``config_drive_format`` option must be set to ``iso9660`` in
  order to use a CD drive as the config drive image.
* To use the config drive with Hyper-V, you must set the
  ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe``
  installation. Additionally, you must set the ``qemu_img_cmd`` value
  to the full path to a ``qemu-img`` command installation.
* You can configure the Compute service to always create a
  configuration drive by setting the ``force_config_drive`` option
  to ``True``.
"""),
    cfg.BoolOpt('config_drive_inject_password',
        default=False,
        help="""
Inject password to config drive.

When enabled, the admin password will be available from the config
drive image.

Related options:

* This option is meaningful when used with other options that enable
  config drive usage with Hyper-V, such as ``force_config_drive``.
"""),
    cfg.IntOpt('volume_attach_retry_count',
        default=10,
        min=0,
        help="""
Volume attach retry count

The number of times to retry attaching a volume. Volume attachment
is retried until success or the given retry count is reached.

Possible values:

* Positive integer values (Default: 10).

Related options:

* The time interval between attachment attempts is declared with the
  volume_attach_retry_interval option.
"""),
    cfg.IntOpt('volume_attach_retry_interval',
        default=5,
        min=0,
        help="""
Volume attach retry interval

Interval between volume attachment attempts, in seconds.

Possible values:

* Time in seconds (Default: 5).

Related options:

* This option is meaningful when volume_attach_retry_count is
  greater than 1.
* The retry loop runs with the volume_attach_retry_count and
  volume_attach_retry_interval configuration options.
"""),
    cfg.BoolOpt('enable_remotefx',
        default=False,
        help="""
Enable RemoteFX feature

This requires at least one DirectX 11 capable graphics adapter for
Windows / Hyper-V Server 2012 R2 or newer, and the RDS-Virtualization
feature has to be enabled.

Instances with RemoteFX can be requested with the following flavor
extra specs:

**os:resolution**. Guest VM screen resolution size. Acceptable values::

    1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160

``3840x2160`` is only available on Windows / Hyper-V Server 2016.

**os:monitors**. Guest VM number of monitors. Acceptable values::

    [1, 4] - Windows / Hyper-V Server 2012 R2
    [1, 8] - Windows / Hyper-V Server 2016

**os:vram**. Guest VM VRAM amount. Only available on Windows /
Hyper-V Server 2016. Acceptable values::

    64, 128, 256, 512, 1024
"""),
    cfg.BoolOpt('use_multipath_io',
        default=False,
        help="""
Use multipath connections when attaching iSCSI or FC disks.

This requires the Multipath IO Windows feature to be enabled. MPIO
must be configured to claim such devices.
"""),
    cfg.ListOpt('iscsi_initiator_list',
        default=[],
        help="""
List of iSCSI initiators that will be used for establishing iSCSI
sessions.

If none are specified, the Microsoft iSCSI initiator service will
choose the initiator.
""")
]


def register_opts(conf):
    conf.register_group(hyperv_opt_group)
    conf.register_opts(hyperv_opts, group=hyperv_opt_group)


def list_opts():
    return {hyperv_opt_group: hyperv_opts}
* Virtual", "checked during instance creation in order to limit the CPU", "of times to retry attaching a volume. Volume attachment is", "1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160 ``3840x2160`` is only available on", "change this value. Possible values: * Time in seconds (Default:", "in seconds (Default: 5). Related options: * This options is", "ratio of 2.0 for an instance with 1024MB of RAM", "enforcement for security, isolation, and service levels. The vSwitch represented", "be enabled. Instances with RemoteFX can be requested with the", "of total implied RAM divided by this value for startup.", "External virtual switch name The Hyper-V Virtual Switch is a", "drive by setting the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password',", "to a VHD, otherwise the config drive will remain an", "levels. The vSwitch represented by this config option must be", "specific language governing permissions and limitations # under the License.", "# not use this file except in compliance with the", "lifecycle notifications of instances that reboot themselves. It is unlikely", "total implied RAM divided by this value for startup. \"\"\"),", "drive usage with Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10,", "available with the installation of the Hyper-V server role. The", "number of monitors. Acceptable values:: [1, 4] - Windows /", "2012 R2 or newer and RDS-Virtualization feature has to be", "must set the ``mkisofs_cmd`` value to the full path to", "attempts, in seconds. Possible values: * Time in seconds (Default:", "to an ``mkisofs.exe`` installation. \"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch", "other apps and services, e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\"", "the ISO to a VHD, otherwise the config drive will", "value greater than 1. The value expresses the ratio between", "help=\"\"\" Mounted disk query retry interval Interval between checks for", "value expresses the ratio between the total RAM assigned to", "under the License is distributed on an \"AS IS\" BASIS,", "the instance before it boots. The config drive can be", "a Windows share. Related options: * \"instances_path\": The directory which", "min=0, help=\"\"\" Mounted disk query retry interval Interval between checks", "on Windows / Hyper-V Server 2016. Acceptable values:: 64, 128,", "integer values (Default: 10). Related options: * Time interval between", "configuration drive by setting the ``force_config_drive`` option to ``True``. \"\"\"),", "this file except in compliance with the License. You may", "language governing permissions and limitations # under the License. from", "disk query retry interval Interval between checks for a mounted", "create an instance will have ``--config-drive=True`` flag. * ``config_drive_format`` option", "count is reached. Possible values: * Positive integer values. Values", "config drive image. Related options: * This option is meaningful", "fetch the state of the instance from Hyper-V through the", "drive as a CD drive. OpenStack can be configured to", "addition, Hyper-V Virtual Switch provides policy enforcement for security, isolation,", "value to the full path to an ``qemu-img`` command installation.", "help=\"\"\" Use multipath connections when attaching iSCSI or FC disks.", "notifications of instances that reboot themselves. 
It is unlikely that", "R2 [1, 8] - Windows / Hyper-V Server 2016 **os:vram**.", "looking for the same \"instances_path\" used locally. Possible values: *", "Virtual Switch is a software-based layer-2 Ethernet network switch that", "to the given value. This option enhances the internal lifecycle", "enable config drive usage with Hyper-V, such as ``force_config_drive``. \"\"\"),", "order to use CD drive as the config drive image.", "Windows share. Related options: * \"instances_path\": The directory which will", "to write instance metadata to a config drive, which is", "Float values greater than 1.0: Enables allocation of total implied", "file except in compliance with the License. You may obtain", "Virtual Switch provides policy enforcement for security, isolation, and service", "CD drive as the config drive image. * To use", "or its path is in the PATH environment variable (Default).", "OR CONDITIONS OF ANY KIND, either express or implied. See", "to an ``qemu-img`` command installation. * You can configure the", "directory as the nova-compute service or its path is in", "In addition, Hyper-V Virtual Switch provides policy enforcement for security,", "number of times to retry checking for a mounted disk.", "state changes. This option is used to fetch the state", "graphics adapter for Windows / Hyper-V Server 2012 R2 or", "with the following flavor extra specs: **os:resolution**. Guest VM screen", "nova-compute service or its path is in the PATH environment", "the nova-compute service or its path is in the PATH", "for estabilishing iSCSI sessions. If none are specified, the Microsoft", "to create an instance will have ``--config-drive=True`` flag. * ``config_drive_format``", "1.0: Disables dynamic memory allocation (Default). * Float values greater", "OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper", "service levels. The vSwitch represented by this config option must", "be available from the config drive image. Related options: *", "default=2, min=0, help=\"\"\" Power state event polling interval Instance power", "then attached to the instance before it boots. The config", "its path is in the PATH environment variable (Default). *", "2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img is required", "in order to use CD drive as the config drive", "state events to the given value. This option enhances the", "from the config drive image. Related options: * This option", "\"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img is required for", "(DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the config_drive_cdrom option is False,", "feature This requires at least one DirectX 11 capable graphics", "Disables dynamic memory allocation (Default). * Float values greater than", "network switch that is available with the installation of the", "integer values. Values greater than 1 is recommended (Default: 10).", "Hyper-V Server 2012 R2 [1, 8] - Windows / Hyper-V", "can get it from here: (http://qemu.weilnetz.de/) or you can install", "cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config drive as a CD drive.", "that will be used for estabilishing iSCSI sessions. If none", "options: * This options is meaningful when volume_attach_retry_count is greater", "installation. 
\"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch name The Hyper-V", "default=5, min=0, help=\"\"\" Mounted disk query retry interval Interval between", "needed to support live migration to hosts with different CPU", "Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\" Volume", "\"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password to config drive. When", "default=5, min=0, help=\"\"\" Volume attach retry interval Interval between volume", "of times to retry checking for a mounted disk. The", "layer-2 Ethernet network switch that is available with the installation", "Server 2012 R2 [1, 8] - Windows / Hyper-V Server", "writing, software # distributed under the License is distributed on", "than 1. The value expresses the ratio between the total", "the full path to an ``mkisofs.exe`` installation. Additionally, you must", "option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk query retry", "the License. You may obtain # a copy of the", "8] - Windows / Hyper-V Server 2016 **os:vram**. Guest VM", "capabilities to connect virtual machines to both virtual networks and", "use this file except in compliance with the License. You", "for security, isolation, and service levels. The vSwitch represented by", "of the Hyper-V server role. The switch includes programmatically managed", "options: * Time interval between disk mount retries is declared", "between disk mount retries is declared with \"mounted_disk_query_retry_interval\" option. \"\"\"),", "startup RAM amount. For example a ratio of 2.0 for", "RAM assigned to an instance and its startup RAM amount.", "Copyright (c) 2016 <NAME> # All Rights Reserved. # #", "server role. The switch includes programmatically managed and extensible capabilities", "name of a Windows share mapped to the \"instances_path\" dir", "configured to write instance metadata to a config drive, which", "and the physical network. In addition, Hyper-V Virtual Switch provides", "Possible values: * Positive integer values (Default: 10). Related options:", "the resize feature to copy files to the target host.", "mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\"", "* Time in seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\"", "path share The name of a Windows share mapped to", "physical network. In addition, Hyper-V Virtual Switch provides policy enforcement", "``True`` or when the REST API call to create an", "- Windows / Hyper-V Server 2012 R2 [1, 8] -", "This requires at least one DirectX 11 capable graphics adapter", "a list of available vswitches is used. This list is", "with Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\"", "and mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power", "instance and its startup RAM amount. For example a ratio", "express or implied. 
See the # License for the specific", "the Apache License, Version 2.0 (the \"License\"); you may #", "support live migration to hosts with different CPU features and", "can be configured to write instance metadata to a config", "a Windows share mapped to the \"instances_path\" dir and used", "greater than 1.0: Enables allocation of total implied RAM divided", "the mounted_disk_query_retry_count is greater than 1. * The retry loop", "is only available on Windows / Hyper-V Server 2016. **os:monitors**.", "in seconds (Default: 60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=\"\"\" Power", "volume_attach_retry_count and volume_attach_retry_interval configuration options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable", "to the full path to an ``mkisofs.exe`` installation. Additionally, you", "reached. Possible values: * Positive integer values (Default: 10). Related", "full path to an ``qemu-img`` command installation. * You can", "retry count is reached. Possible values: * Positive integer values.", "recommended (Default: 10). Related options: * Time interval between disk", "dynamic memory allocation (ballooning) when set to a value greater", "interval Interval between volume attachment attempts, in seconds. Possible values:", "will be available from the config drive image. Related options:", "with \"mounted_disk_query_retry_interval\" option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk", "* Time interval between disk mount retries is declared with", "installation of the Hyper-V server role. The switch includes programmatically", "\"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch name The Hyper-V Virtual", "monitors. Acceptable values:: [1, 4] - Windows / Hyper-V Server", "some of the image related operations like converting between different", "drive, which is then attached to the instance before it", "config option. You can either give the full path of", "dir and used by the resize feature to copy files", "values: * Name of the qemu-img executable, in case it", "config_drive_cdrom option is False, qemu-img will be used to convert", "qemu-img is required for some of the image related operations", "help=\"\"\" Inject password to config drive. When enabled, the admin", "specified, the Microsoft iSCSI initiator service will choose the initiator.", "used, looking for the same \"instances_path\" used locally. Possible values:", "volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0, help=\"\"\" Volume attach retry", "virtual networks and the physical network. In addition, Hyper-V Virtual", "Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper path for this", "instance with 1024MB of RAM implies 512MB of RAM allocated", "this config option must be an external one (not internal", "config drive. When enabled, the admin password will be available", "a config drive, which is then attached to the instance", "driver to be used within an OpenStack deployment. \"\"\") hyperv_opts", "qemu-img command qemu-img is required for some of the image", "<filename>nova/conf/hyperv.py<gh_stars>0 # Copyright (c) 2016 <NAME> # All Rights Reserved.", "Instances path share The name of a Windows share mapped", "switch includes programmatically managed and extensible capabilities to connect virtual", "to change this value. 
Possible values: * Time in seconds", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "* ``config_drive_format`` option must be set to ``iso9660`` in order", "used by the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted", "here: (http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack Hyper-V", "value to the full path to an ``mkisofs.exe`` installation. \"\"\"),", "11 capable graphics adapter for Windows / Hyper-V Server 2012", "wait for instance to shut down after soft reboot request", "be retrieved by other apps and services, e.g.: Ceilometer. \"\"\"),", "(Default: 5). Related options: * This options is meaningful when", "the physical network. In addition, Hyper-V Virtual Switch provides policy", "an ``qemu-img`` command installation. * You can configure the Compute", "WMI interface, within the specified timeframe. Possible values: * Timeframe", "the given value. This option enhances the internal lifecycle notifications", "initiator service will choose the initiator. \"\"\") ] def register_opts(conf):", "of the image related operations like converting between different image", "retry loop runs with mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options. \"\"\"),", "60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config drive as a", "values: * Positive integer values (Default: 10). Related options: *", "cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot seconds Number of", "retry count is reached. Possible values: * Positive integer values", "``qemu_img_cmd`` value to the full path to an ``qemu-img`` command", "qemu-img executable, in case it is in the same directory", "is retried until success or the given retry count is", "set to ``True`` or when the REST API call to", "declared with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0, help=\"\"\" Volume", "to ``iso9660`` in order to use CD drive as the", "the proper path for this config option. You can either", "* This options is meaningful when volume_attach_retry_count is greater than", "The number of times to retry checking for a mounted", "feature allows you to configure the Hyper-V hypervisor driver to", "License for the specific language governing permissions and limitations #", "import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\", title='The Hyper-V feature', help=\"\"\" The", "This option enhances the internal lifecycle notifications of instances that", "and leave this option to the default value. Possible values:", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "flag is needed to support live migration to hosts with", "meaningful with ``force_config_drive`` option set to ``True`` or when the", "help=\"\"\" Volume attach retry interval Interval between volume attachment attempts,", "executable, in case it is in the same directory as", "Rights Reserved. # # Licensed under the Apache License, Version", "help=\"\"\" The hyperv feature allows you to configure the Hyper-V", "to wait for instance to shut down after soft reboot", "MPIO must be configured to claim such devices. \"\"\"), cfg.ListOpt('iscsi_initiator_list',", "``--config-drive=True`` flag. * ``config_drive_format`` option must be set to ``iso9660``", "VM VRAM amount. Only available on Windows / Hyper-V Server", "instances that reboot themselves. 
It is unlikely that an operator", "Collected data can be retrieved by other apps and services,", "hosts with different CPU features and checked during instance creation", "interval Interval between checks for a mounted disk, in seconds.", "which will be used if this option here is left", "use config drive with Hyper-V, you must set the ``mkisofs_cmd``", "is available with the installation of the Hyper-V server role.", "includes programmatically managed and extensible capabilities to connect virtual machines", "in seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img command", "mounted disk. The query runs until the device can be", "default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img is required for some of", "option is used to fetch the state of the instance", "by setting the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False,", "attached as a disk drive (default) or as a CD", "default=\"\", help=\"\"\" Instances path share The name of a Windows", "the # License for the specific language governing permissions and", "is unlikely that an operator has to change this value.", "managed and extensible capabilities to connect virtual machines to both", "flavor extra specs: **os:resolution**. Guest VM screen resolution size. Acceptable", "within the specified timeframe. Possible values: * Timeframe in seconds", "given retry count is reached. Possible values: * Positive integer", "the Microsoft iSCSI initiator service will choose the initiator. \"\"\")", "power state change event polling frequency. Sets the listener interval", "OpenStack can be configured to write instance metadata to a", "to be enabled. Instances with RemoteFX can be requested with", "help=\"\"\" Mounted disk query retry count The number of times", "seconds Number of seconds to wait for instance to shut", "is declared with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0, help=\"\"\"", "allows you to configure the Hyper-V hypervisor driver to be", "help=\"\"\" Dynamic memory ratio Enables dynamic memory allocation (ballooning) when", "config drive with Hyper-V, you must set the ``mkisofs_cmd`` value", "the Hyper-V hypervisor driver to be used within an OpenStack", "hyperv feature allows you to configure the Hyper-V hypervisor driver", "Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft", "Mounted disk query retry interval Interval between checks for a", "does not shutdown within this window. Possible values: * Time", "attachment attempts is declared with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5,", "in case it is in the same directory as the", "create a configuration drive by setting the ``force_config_drive`` option to", "**os:vram**. Guest VM VRAM amount. Only available on Windows /", "reboot themselves. It is unlikely that an operator has to", "it boots. The config drive can be attached as a", "* Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait", "allocated at startup. Possible values: * 1.0: Disables dynamic memory", "options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state check timeframe", "sessions. If none are specified, the Microsoft iSCSI initiator service", "to a value greater than 1. 
The value expresses the", "for an instance with 1024MB of RAM implies 512MB of", "is meaningful when used with other options that enable config", "if instance does not shutdown within this window. Possible values:", "disk, in seconds. Possible values: * Time in seconds (Default:", "Related options: * This options is meaningful when volume_attach_retry_count is", "and services, e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path", "This list is queried using WQL. * Virtual switch name.", "attach retry count The number of times to retry attaching", "available vswitches is used. This list is queried using WQL.", "config drive will remain an ISO. To use config drive", "Hyper-V server role. The switch includes programmatically managed and extensible", "using Hyper-V's metric APIs. Collected data can be retrieved by", "startup. Possible values: * 1.0: Disables dynamic memory allocation (Default).", "timeframe The timeframe to be checked for instance power state", "the PATH environment variable and leave this option to the", "made. We fall back to hard reboot if instance does", "Instances with RemoteFX can be requested with the following flavor", "remain an ISO. To use config drive with Hyper-V, you", "(Default). * Name of a Windows share. Related options: *", "RAM divided by this value for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False,", "must be set to ``iso9660`` in order to use CD", "different CPU features and checked during instance creation in order", "values:: [1, 4] - Windows / Hyper-V Server 2012 R2", "size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160 ``3840x2160``", "can be retrieved by other apps and services, e.g.: Ceilometer.", "Windows / Hyper-V Server 2016. **os:monitors**. Guest VM number of", "in the PATH environment variable (Default). * Path of qemu-img", "left blank, an administrative share (hidden network share) will be", "cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk query retry interval Interval", "window. Possible values: * Time in seconds (Default: 60). \"\"\"),", "option is meaningful with ``force_config_drive`` option set to ``True`` or", "between volume attachment attempts, in seconds. Possible values: * Time", "given value. This option enhances the internal lifecycle notifications of", "following flavor extra specs: **os:resolution**. Guest VM screen resolution size.", "as the nova-compute service or its path is in the", "greater than 1 is recommended (Default: 10). Related options: *", "Hyper-V's metric APIs. Collected data can be retrieved by other", "meaningful when volume_attach_retry_count is greater than 1. * The retry", "or you can install the Cloudbase OpenStack Hyper-V Compute Driver", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "This options is meaningful when volume_attach_retry_count is greater than 1.", "password will be available from the config drive image. Related", "you may # not use this file except in compliance", "CPU features and checked during instance creation in order to", "mounted disk, in seconds. Possible values: * Time in seconds", "Server 2012 R2 or newer and RDS-Virtualization feature has to", "external one (not internal or private). Possible values: * If", "is in the PATH environment variable (Default). * Path of", "* Positive integer values. 
Values greater than 1 is recommended", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "can either give the full path of qemu-img.exe or set", "mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state", "or FC disks. This requires the Multipath IO Windows feature", "FC disks. This requires the Multipath IO Windows feature to", "Hyper-V through the WMI interface, within the specified timeframe. Possible", "an ``mkisofs.exe`` installation. Additionally, you must set the ``qemu_img_cmd`` value", "event polling frequency. Sets the listener interval for power state", "2012 R2 [1, 8] - Windows / Hyper-V Server 2016", "such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\" Volume attach", "for the specific language governing permissions and limitations # under", "when set to a value greater than 1. The value", "for the same \"instances_path\" used locally. Possible values: * \"\":", "on Windows / Hyper-V Server 2016. **os:monitors**. Guest VM number", "power state events to the given value. This option enhances", "an operator has to change this value. Possible values: *", "be requested with the following flavor extra specs: **os:resolution**. Guest", "this option here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\"", "of 2.0 for an instance with 1024MB of RAM implies", "internal or private). Possible values: * If not provided, the", "\"\"\") ] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts, group=hyperv_opt_group) def list_opts(): return", "to the \"instances_path\" dir and used by the resize feature", "Power state check timeframe The timeframe to be checked for", "the default value. Possible values: * Name of the qemu-img", "seconds (Default: 5). Related options: * This options is meaningful", "1. * The retry loop runs with volume_attach_retry_count and volume_attach_retry_interval", "of a Windows share mapped to the \"instances_path\" dir and", "or the retry count is reached. Possible values: * Positive", "Possible values: * \"\": An administrative share will be used", "as a CD drive. Related options: * This option is", "limitations # under the License. from oslo_config import cfg hyperv_opt_group", "password to config drive. When enabled, the admin password will", "enabled. MPIO must be configured to claim such devices. \"\"\"),", "= [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables dynamic", "that reboot themselves. It is unlikely that an operator has", "memory ratio Enables dynamic memory allocation (ballooning) when set to", "options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable RemoteFX feature This requires", "of RAM implies 512MB of RAM allocated at startup. Possible", "to hosts with different CPU features and checked during instance", "greater than 1. * The retry loop runs with mounted_disk_query_retry_count", "required for some of the image related operations like converting", "seconds. Possible values: * Time in seconds (Default: 5). Related", "**os:resolution**. Guest VM screen resolution size. Acceptable values:: 1024x768, 1280x1024,", "instance metrics collection Enables metrics collections for an instance by", "VM screen resolution size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200,", "the instance. 
\"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted disk query", "* Name of the qemu-img executable, in case it is", "RemoteFX feature This requires at least one DirectX 11 capable", "have ``--config-drive=True`` flag. * ``config_drive_format`` option must be set to", "switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot", "] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts, group=hyperv_opt_group) def list_opts(): return {hyperv_opt_group:", "\"instances_path\" dir and used by the resize feature to copy", "the \"instances_path\" dir and used by the resize feature to", "10). Related options: * Time interval between disk mount retries", "with RemoteFX can be requested with the following flavor extra", "the ratio between the total RAM assigned to an instance", "\"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config drive as a CD", "iSCSI sessions. If none are specified, the Microsoft iSCSI initiator", "the given retry count is reached. Possible values: * Positive", "Windows / Hyper-V Server 2016 **os:vram**. Guest VM VRAM amount.", "of qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the config_drive_cdrom", "is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features", "values: * If not provided, the first of a list", "as a CD drive. OpenStack can be configured to write", "the Compute service to always create a configuration drive by", "you to configure the Hyper-V hypervisor driver to be used", "You may obtain # a copy of the License at", "VHD, otherwise the config drive will remain an ISO. To", "reboot request is made. We fall back to hard reboot", "allocation (Default). * Float values greater than 1.0: Enables allocation", "path to an ``qemu-img`` command installation. * You can configure", "Interval between checks for a mounted disk, in seconds. Possible", "* This option is meaningful when used with other options", "by other apps and services, e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\",", "option set to ``True`` or when the REST API call", "* \"instances_path\": The directory which will be used if this", "be checked for instance power state changes. This option is", "instance metadata to a config drive, which is then attached", "Cloudbase OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the", "hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables", "this option to the default value. Possible values: * Name", "is then attached to the instance before it boots. The", "retries is declared with \"mounted_disk_query_retry_interval\" option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0,", "(Default: 5). Related options: * This option is meaningful when", "unlikely that an operator has to change this value. 
Possible", "Time interval between attachment attempts is declared with volume_attach_retry_interval option.", "to convert the ISO to a VHD, otherwise the config", "during instance creation in order to limit the CPU features", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "when the REST API call to create an instance will", "R2 or newer and RDS-Virtualization feature has to be enabled.", "the CPU features used by the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10,", "path in the PATH environment variable and leave this option", "requires at least one DirectX 11 capable graphics adapter for", "(hidden network share) will be used, looking for the same", "[ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables dynamic memory", "installation. * You can configure the Compute service to always", "help=\"\"\" Power state event polling interval Instance power state change", "* This option is meaningful with ``force_config_drive`` option set to", "by this value for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable", "which automatically sets the proper path for this config option.", "administrative share will be used (Default). * Name of a", "The hyperv feature allows you to configure the Hyper-V hypervisor", "same directory as the nova-compute service or its path is", "attempts is declared with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0,", "of qemu-img.exe or set its path in the PATH environment", "option to the default value. Possible values: * Name of", "networks and the physical network. In addition, Hyper-V Virtual Switch", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "(Default). * Path of qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: *", "Switch is a software-based layer-2 Ethernet network switch that is", "attaching iSCSI or FC disks. This requires the Multipath IO", "both virtual networks and the physical network. In addition, Hyper-V", "for some of the image related operations like converting between", "an instance by using Hyper-V's metric APIs. Collected data can", "to a config drive, which is then attached to the", "used if this option here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features',", "min=0, help=\"\"\" Volume attach retry count The number of times", "it from here: (http://qemu.weilnetz.de/) or you can install the Cloudbase", "or the given retry count is reached. Possible values: *", "volume. Volume attachment is retried until success or the given", "cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path share The name of a", "image. * To use config drive with Hyper-V, you must", "related operations like converting between different image types. You can", "cfg.OptGroup(\"hyperv\", title='The Hyper-V feature', help=\"\"\" The hyperv feature allows you", "Possible values: * Name of the qemu-img executable, in case", "configuration options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable RemoteFX feature This", "(c) 2016 <NAME> # All Rights Reserved. # # Licensed", "types. You can get it from here: (http://qemu.weilnetz.de/) or you", "boots. The config drive can be attached as a disk", "left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features This", "for instance power state changes. 
This option is used to", "the image related operations like converting between different image types.", "``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\" Volume attach retry count", "through the WMI interface, within the specified timeframe. Possible values:", "default value. Possible values: * Name of the qemu-img executable,", "* Timeframe in seconds (Default: 60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0,", "help=\"\"\" Instances path share The name of a Windows share", "(Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img is", "attaching a volume. Volume attachment is retried until success or", "1 is recommended (Default: 10). Related options: * Time interval", "default=False, help=\"\"\" Mount config drive as a CD drive. OpenStack", "default=60, min=0, help=\"\"\" Power state check timeframe The timeframe to", "count The number of times to retry attaching a volume.", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "and checked during instance creation in order to limit the", "be configured to write instance metadata to a config drive,", "Inject password to config drive. When enabled, the admin password", "This option is meaningful with ``force_config_drive`` option set to ``True``", "call to create an instance will have ``--config-drive=True`` flag. *", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "API call to create an instance will have ``--config-drive=True`` flag.", "used for estabilishing iSCSI sessions. If none are specified, the", "capable graphics adapter for Windows / Hyper-V Server 2012 R2", "VRAM amount. Only available on Windows / Hyper-V Server 2016.", "is reached. Possible values: * Positive integer values. Values greater", "cfg.BoolOpt('use_multipath_io', default=False, help=\"\"\" Use multipath connections when attaching iSCSI or", "installation. Additionally, you must set the ``qemu_img_cmd`` value to the", "Hyper-V Virtual Switch provides policy enforcement for security, isolation, and", "Time interval between disk mount retries is declared with \"mounted_disk_query_retry_interval\"", "than 1 is recommended (Default: 10). Related options: * Time", "deployment. \"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory", "back to hard reboot if instance does not shutdown within", "used with other options that enable config drive usage with", "its startup RAM amount. For example a ratio of 2.0", "If the config_drive_cdrom option is False, qemu-img will be used", "instance before it boots. The config drive can be attached", "KIND, either express or implied. See the # License for", "\"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path share The name of", "For example a ratio of 2.0 for an instance with", "Possible values: * 1.0: Disables dynamic memory allocation (Default). *", "to the instance before it boots. The config drive can", "set the ``qemu_img_cmd`` value to the full path to an", "seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config drive", "instance creation in order to limit the CPU features used", "Hyper-V Server 2012 R2 or newer and RDS-Virtualization feature has", "drive will remain an ISO. To use config drive with", "* If not provided, the first of a list of", "* 1.0: Disables dynamic memory allocation (Default). 
* Float values", "between different image types. You can get it from here:", "of instances that reboot themselves. It is unlikely that an", "the listener interval for power state events to the given", "cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch name The Hyper-V Virtual Switch", "when used with other options that enable config drive usage", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "the specified timeframe. Possible values: * Timeframe in seconds (Default:", "The directory which will be used if this option here", "``mkisofs.exe`` installation. \"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch name The", "configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state check", "values: * 1.0: Disables dynamic memory allocation (Default). * Float", "256, 512, 1024 \"\"\"), cfg.BoolOpt('use_multipath_io', default=False, help=\"\"\" Use multipath connections", "min=0, help=\"\"\" Power state event polling interval Instance power state", "private). Possible values: * If not provided, the first of", "queried using WQL. * Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60,", "implied. See the # License for the specific language governing", "list is queried using WQL. * Virtual switch name. \"\"\"),", "an ISO. To use config drive with Hyper-V, you must", "will choose the initiator. \"\"\") ] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts,", "copy files to the target host. If left blank, an", "values: * Time in seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\",", "``force_config_drive`` option set to ``True`` or when the REST API", "in the same directory as the nova-compute service or its", "of RAM allocated at startup. Possible values: * 1.0: Disables", "requires the Multipath IO Windows feature to be enabled. MPIO", "Hyper-V Server 2016 **os:vram**. Guest VM VRAM amount. Only available", "the total RAM assigned to an instance and its startup", "If none are specified, the Microsoft iSCSI initiator service will", "is recommended (Default: 10). Related options: * Time interval between", "Time in seconds (Default: 5). Related options: * This option", "extensible capabilities to connect virtual machines to both virtual networks", "administrative share (hidden network share) will be used, looking for", "``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password to config drive.", "with the installation of the Hyper-V server role. The switch", "cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\" Volume attach retry count The number", "ISO to a VHD, otherwise the config drive will remain", "* Time interval between attachment attempts is declared with volume_attach_retry_interval", "when attaching iSCSI or FC disks. This requires the Multipath", "options: * If the config_drive_cdrom option is False, qemu-img will", "the full path to an ``mkisofs.exe`` installation. 
\"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\"", "Hyper-V Virtual Switch is a software-based layer-2 Ethernet network switch", "be attached as a disk drive (default) or as a", "obtain # a copy of the License at # #", "metadata to a config drive, which is then attached to", "by this config option must be an external one (not", "min=0, help=\"\"\" Volume attach retry interval Interval between volume attachment", "help=\"\"\" External virtual switch name The Hyper-V Virtual Switch is", "available on Windows / Hyper-V Server 2016. Acceptable values:: 64,", "64, 128, 256, 512, 1024 \"\"\"), cfg.BoolOpt('use_multipath_io', default=False, help=\"\"\" Use", "in order to limit the CPU features used by the", "metric APIs. Collected data can be retrieved by other apps", "of iSCSI initiators that will be used for estabilishing iSCSI", "be used within an OpenStack deployment. \"\"\") hyperv_opts = [", "under the License. from oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\",", "loop runs with volume_attach_retry_count and volume_attach_retry_interval configuration options. \"\"\"), cfg.BoolOpt('enable_remotefx',", "of seconds to wait for instance to shut down after", "Volume attachment is retried until success or the given retry", "1920x1200, 2560x1600, 3840x2160 ``3840x2160`` is only available on Windows /", "to the target host. If left blank, an administrative share", "drive. Related options: * This option is meaningful with ``force_config_drive``", "from Hyper-V through the WMI interface, within the specified timeframe.", "share. Related options: * \"instances_path\": The directory which will be", "default=10, min=0, help=\"\"\" Mounted disk query retry count The number", "used (Default). * Name of a Windows share. Related options:", "in the PATH environment variable and leave this option to", "the Cloudbase OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets", "event polling interval Instance power state change event polling frequency.", "the config drive will remain an ISO. To use config", "* Float values greater than 1.0: Enables allocation of total", "the device can be found or the retry count is", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "drive with Hyper-V, you must set the ``mkisofs_cmd`` value to", "options that enable config drive usage with Hyper-V, such as", "down after soft reboot request is made. We fall back", "a mounted disk. The query runs until the device can", "in seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config", "Limit CPU features This flag is needed to support live", "Number of seconds to wait for instance to shut down", "qemu-img will be used to convert the ISO to a", "will be used to convert the ISO to a VHD,", "OpenStack deployment. \"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic", "live migration to hosts with different CPU features and checked", "times to retry attaching a volume. Volume attachment is retried", "name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot seconds", "set the ``mkisofs_cmd`` value to the full path to an", "default=False, help=\"\"\" Limit CPU features This flag is needed to", "the WMI interface, within the specified timeframe. Possible values: *", "(default) or as a CD drive. 
Related options: * This", "feature', help=\"\"\" The hyperv feature allows you to configure the", "service to always create a configuration drive by setting the", "the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject", "oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\", title='The Hyper-V feature', help=\"\"\"", "min=0, help=\"\"\" Mounted disk query retry count The number of", "data can be retrieved by other apps and services, e.g.:", "Enables metrics collections for an instance by using Hyper-V's metric", "2.0 (the \"License\"); you may # not use this file", "e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path share The", "Time in seconds (Default: 5). Related options: * This options", "iSCSI initiators that will be used for estabilishing iSCSI sessions.", "\"\"\"), cfg.ListOpt('iscsi_initiator_list', default=[], help=\"\"\" List of iSCSI initiators that will", "by applicable law or agreed to in writing, software #", "60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=\"\"\" Power state event polling", "cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password to config drive. When enabled,", "and service levels. The vSwitch represented by this config option", "screen resolution size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600,", "features used by the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\"", "and RDS-Virtualization feature has to be enabled. Instances with RemoteFX", "2.0 for an instance with 1024MB of RAM implies 512MB", "a disk drive (default) or as a CD drive. Related", "none are specified, the Microsoft iSCSI initiator service will choose", "WQL. * Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\"", "seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img", "IO Windows feature to be enabled. MPIO must be configured", "# All Rights Reserved. # # Licensed under the Apache", "default=False, help=\"\"\" Enable RemoteFX feature This requires at least one", "you must set the ``mkisofs_cmd`` value to the full path", "Sets the listener interval for power state events to the", "CPU features This flag is needed to support live migration", "the REST API call to create an instance will have", "* Name of a Windows share. Related options: * \"instances_path\":", "service will choose the initiator. \"\"\") ] def register_opts(conf): conf.register_group(hyperv_opt_group)", "features This flag is needed to support live migration to", "the config_drive_cdrom option is False, qemu-img will be used to", "applicable law or agreed to in writing, software # distributed", "config drive can be attached as a disk drive (default)", "reboot if instance does not shutdown within this window. Possible", "collection Enables metrics collections for an instance by using Hyper-V's", "config drive as a CD drive. OpenStack can be configured", "and extensible capabilities to connect virtual machines to both virtual", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "* Time in seconds (Default: 5). Related options: * This", "command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the config_drive_cdrom option is", "(Default). 
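

# ---------------------------------------------------------------------------
# Illustrative usage sketch (an assumption, not part of the original module):
# a minimal example of how the options above could be registered on a
# standalone oslo.config ConfigOpts instance and read back. Nova itself wires
# these into its global CONF object; the local `conf` below exists only for
# demonstration.
if __name__ == "__main__":
    conf = cfg.ConfigOpts()
    register_opts(conf)   # registers the [hyperv] group and its options
    conf([])              # parse an empty command line, so defaults apply
    print(conf.hyperv.dynamic_memory_ratio)       # -> 1.0
    print(conf.hyperv.volume_attach_retry_count)  # -> 10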
* Float values greater than 1.0: Enables allocation of", "be an external one (not internal or private). Possible values:", "governing permissions and limitations # under the License. from oslo_config", "\"instances_path\" used locally. Possible values: * \"\": An administrative share", "devices. \"\"\"), cfg.ListOpt('iscsi_initiator_list', default=[], help=\"\"\" List of iSCSI initiators that", "\"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot seconds Number", "checking for a mounted disk. The query runs until the", "is False, qemu-img will be used to convert the ISO", "retry count The number of times to retry attaching a", "not provided, the first of a list of available vswitches", "Time in seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img", "checks for a mounted disk, in seconds. Possible values: *", "disk drive (default) or as a CD drive. Related options:", "in seconds. Possible values: * Time in seconds (Default: 5).", "option is meaningful when used with other options that enable", "config drive, which is then attached to the instance before", "other options that enable config drive usage with Hyper-V, such", "leave this option to the default value. Possible values: *", "Dynamic memory ratio Enables dynamic memory allocation (ballooning) when set", "runs with mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60,", "command qemu-img is required for some of the image related", "# License for the specific language governing permissions and limitations", "(http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack Hyper-V Compute", "\"instances_path\": The directory which will be used if this option", "to fetch the state of the instance from Hyper-V through", "is needed to support live migration to hosts with different", "qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the config_drive_cdrom option", "a CD drive. OpenStack can be configured to write instance", "and used by the resize feature to copy files to", "used by the resize feature to copy files to the", "query runs until the device can be found or the", "represented by this config option must be an external one", "a ratio of 2.0 for an instance with 1024MB of", "for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable instance metrics collection", "files to the target host. If left blank, an administrative", "at least one DirectX 11 capable graphics adapter for Windows", "specified timeframe. Possible values: * Timeframe in seconds (Default: 60).", "can configure the Compute service to always create a configuration", "cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables dynamic memory allocation", "must be an external one (not internal or private). Possible", "drive can be attached as a disk drive (default) or", "cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features This flag is needed", "to be enabled. MPIO must be configured to claim such", "``3840x2160`` is only available on Windows / Hyper-V Server 2016.", "volume_attach_retry_count is greater than 1. * The retry loop runs", "The name of a Windows share mapped to the \"instances_path\"", "value. Possible values: * Time in seconds (Default: 2). 
\"\"\"),", "Related options: * This option is meaningful when the mounted_disk_query_retry_count", "the full path to an ``qemu-img`` command installation. * You", "initiator. \"\"\") ] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts, group=hyperv_opt_group) def list_opts():", "to ``True`` or when the REST API call to create", "config drive usage with Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count',", "RDS-Virtualization feature has to be enabled. Instances with RemoteFX can", "drive as the config drive image. * To use config", "Ethernet network switch that is available with the installation of", "allocation of total implied RAM divided by this value for", "drive (default) or as a CD drive. Related options: *", "interval Instance power state change event polling frequency. Sets the", "License. You may obtain # a copy of the License", "path is in the PATH environment variable (Default). * Path", "attachment is retried until success or the given retry count", "ANY KIND, either express or implied. See the # License", "You can configure the Compute service to always create a", "2016. Acceptable values:: 64, 128, 256, 512, 1024 \"\"\"), cfg.BoolOpt('use_multipath_io',", "policy enforcement for security, isolation, and service levels. The vSwitch", "Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper path", "Windows / Hyper-V Server 2012 R2 or newer and RDS-Virtualization", "enhances the internal lifecycle notifications of instances that reboot themselves.", "until the device can be found or the retry count", "drive. OpenStack can be configured to write instance metadata to", "change event polling frequency. Sets the listener interval for power", "value. This option enhances the internal lifecycle notifications of instances", "than 1. * The retry loop runs with mounted_disk_query_retry_count and", "apps and services, e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances", "newer and RDS-Virtualization feature has to be enabled. Instances with", "Name of a Windows share. Related options: * \"instances_path\": The", "order to limit the CPU features used by the instance.", "or implied. See the # License for the specific language", "* The retry loop runs with volume_attach_retry_count and volume_attach_retry_interval configuration" ]
[ "input(\"Chemin d'écriture ? (words.txt) \") if path == \"\": path", "x: len(x) > 4, words_list.split('\\n')) path = input(\"Chemin d'écriture ?", "4, words_list.split('\\n')) path = input(\"Chemin d'écriture ? (words.txt) \") if", "= input(\"Chemin d'écriture ? (words.txt) \") if path == \"\":", "len(x) > 4, words_list.split('\\n')) path = input(\"Chemin d'écriture ? (words.txt)", "words_list = requests.get(\"https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt\").text words_list = filter(lambda x: len(x) > 4,", "\") if path == \"\": path = \"./words.txt\" with open(path,", "\"\": path = \"./words.txt\" with open(path, \"w\", encoding=\"utf-8\") as file:", "words_list = filter(lambda x: len(x) > 4, words_list.split('\\n')) path =", "path = input(\"Chemin d'écriture ? (words.txt) \") if path ==", "d'écriture ? (words.txt) \") if path == \"\": path =", "path == \"\": path = \"./words.txt\" with open(path, \"w\", encoding=\"utf-8\")", "if path == \"\": path = \"./words.txt\" with open(path, \"w\",", "(words.txt) \") if path == \"\": path = \"./words.txt\" with", "> 4, words_list.split('\\n')) path = input(\"Chemin d'écriture ? (words.txt) \")", "requests.get(\"https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt\").text words_list = filter(lambda x: len(x) > 4, words_list.split('\\n')) path", "words_list.split('\\n')) path = input(\"Chemin d'écriture ? (words.txt) \") if path", "requests words_list = requests.get(\"https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt\").text words_list = filter(lambda x: len(x) >", "path = \"./words.txt\" with open(path, \"w\", encoding=\"utf-8\") as file: file.write('\\n'.join(words_list))", "= requests.get(\"https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt\").text words_list = filter(lambda x: len(x) > 4, words_list.split('\\n'))", "filter(lambda x: len(x) > 4, words_list.split('\\n')) path = input(\"Chemin d'écriture", "? (words.txt) \") if path == \"\": path = \"./words.txt\"", "== \"\": path = \"./words.txt\" with open(path, \"w\", encoding=\"utf-8\") as", "= filter(lambda x: len(x) > 4, words_list.split('\\n')) path = input(\"Chemin", "import requests words_list = requests.get(\"https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt\").text words_list = filter(lambda x: len(x)" ]
[ "gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) # Test upload to bucket without", "prefix uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude',", "# Test upload to bucket without trailing / uploader.upload_gallery('s3://testbucket/path', gallery_path)", "import unittest from unittest import mock import os import subprocess", "'.DS_Store']) # Test upload to bucket without trailing / uploader.upload_gallery('s3://testbucket/path',", "as tempdir: # Setup mock file and uploader tempdir.write('index.html', b'')", "'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) # Test upload to bucket", "TempDirectory() as tempdir: # Setup mock file and uploader tempdir.write('index.html',", "with TempDirectory() as tempdir: # Setup mock file and uploader", "Setup mock file and uploader tempdir.write('index.html', b'') gallery_path = os.path.join(tempdir.path,", "uploader tempdir.write('index.html', b'') gallery_path = os.path.join(tempdir.path, 'index.html') uploader = get_uploader('aws')", "bucket without prefix uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path,", "AWSUploaderTestCase(unittest.TestCase): def test_no_location(self): uploader = get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def test_upload_gallery(self,", "from simplegallery.upload.uploader_factory import get_uploader class AWSUploaderTestCase(unittest.TestCase): def test_no_location(self): uploader =", "'--exclude', '.DS_Store']) # Test upload to bucket without trailing /", "Test upload to bucket without prefix uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws',", "trailing / uploader.upload_gallery('s3://testbucket/path', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/',", "get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def test_upload_gallery(self, subprocess_run): subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0)", "b'') gallery_path = os.path.join(tempdir.path, 'index.html') uploader = get_uploader('aws') # Test", "to bucket without trailing / uploader.upload_gallery('s3://testbucket/path', gallery_path) subprocess_run.assert_called_with( ['aws', 's3',", "without prefix uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/',", "uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])", "uploader = get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def test_upload_gallery(self, subprocess_run): subprocess_run.return_value =", "'s3://testbucket/path/', '--exclude', '.DS_Store']) # Test upload to bucket without trailing", "simplegallery.upload.uploader_factory import get_uploader class AWSUploaderTestCase(unittest.TestCase): def test_no_location(self): uploader = get_uploader('aws')", "subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) # Test", "bucket without trailing / uploader.upload_gallery('s3://testbucket/path', gallery_path) 
subprocess_run.assert_called_with( ['aws', 's3', 'sync',", "upload to bucket without trailing / uploader.upload_gallery('s3://testbucket/path', gallery_path) subprocess_run.assert_called_with( ['aws',", "import subprocess from testfixtures import TempDirectory from simplegallery.upload.uploader_factory import get_uploader", "'--exclude', '.DS_Store']) # Test upload to bucket without prefix uploader.upload_gallery('testbucket/path/',", "test_upload_gallery(self, subprocess_run): subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0) with TempDirectory() as tempdir:", "file and uploader tempdir.write('index.html', b'') gallery_path = os.path.join(tempdir.path, 'index.html') uploader", "get_uploader('aws') # Test upload to bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws',", "without trailing / uploader.upload_gallery('s3://testbucket/path', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path,", "test_no_location(self): uploader = get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def test_upload_gallery(self, subprocess_run): subprocess_run.return_value", "subprocess.CompletedProcess([], returncode=0) with TempDirectory() as tempdir: # Setup mock file", "['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) # Test upload", "to bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/',", "= get_uploader('aws') # Test upload to bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path) subprocess_run.assert_called_with(", "subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) if __name__", "def test_upload_gallery(self, subprocess_run): subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0) with TempDirectory() as", "uploader = get_uploader('aws') # Test upload to bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path)", "'.DS_Store']) # Test upload to bucket without prefix uploader.upload_gallery('testbucket/path/', gallery_path)", "class AWSUploaderTestCase(unittest.TestCase): def test_no_location(self): uploader = get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def", "gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) if", "subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0) with TempDirectory() as tempdir: # Setup", "Test upload to bucket without trailing / uploader.upload_gallery('s3://testbucket/path', gallery_path) subprocess_run.assert_called_with(", "mock file and uploader tempdir.write('index.html', b'') gallery_path = os.path.join(tempdir.path, 'index.html')", "mock import os import subprocess from testfixtures import TempDirectory from", "TempDirectory from simplegallery.upload.uploader_factory import get_uploader class AWSUploaderTestCase(unittest.TestCase): def test_no_location(self): uploader", "os import subprocess from testfixtures import TempDirectory from simplegallery.upload.uploader_factory import", "subprocess from testfixtures import TempDirectory from simplegallery.upload.uploader_factory import get_uploader class", "bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path) 
subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude',", "from testfixtures import TempDirectory from simplegallery.upload.uploader_factory import get_uploader class AWSUploaderTestCase(unittest.TestCase):", "os.path.join(tempdir.path, 'index.html') uploader = get_uploader('aws') # Test upload to bucket", "'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) if __name__ == '__main__': unittest.main()", "unittest import mock import os import subprocess from testfixtures import", "= get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def test_upload_gallery(self, subprocess_run): subprocess_run.return_value = subprocess.CompletedProcess([],", "'s3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) # Test upload to", "self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def test_upload_gallery(self, subprocess_run): subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0) with", "import os import subprocess from testfixtures import TempDirectory from simplegallery.upload.uploader_factory", "@mock.patch('subprocess.run') def test_upload_gallery(self, subprocess_run): subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0) with TempDirectory()", "import TempDirectory from simplegallery.upload.uploader_factory import get_uploader class AWSUploaderTestCase(unittest.TestCase): def test_no_location(self):", "'s3://testbucket/path/', '--exclude', '.DS_Store']) # Test upload to bucket without prefix", "get_uploader class AWSUploaderTestCase(unittest.TestCase): def test_no_location(self): uploader = get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run')", "subprocess_run): subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0) with TempDirectory() as tempdir: #", "tempdir: # Setup mock file and uploader tempdir.write('index.html', b'') gallery_path", "def test_no_location(self): uploader = get_uploader('aws') self.assertFalse(uploader.check_location('')) @mock.patch('subprocess.run') def test_upload_gallery(self, subprocess_run):", "import get_uploader class AWSUploaderTestCase(unittest.TestCase): def test_no_location(self): uploader = get_uploader('aws') self.assertFalse(uploader.check_location(''))", "# Test upload to bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3',", "'index.html') uploader = get_uploader('aws') # Test upload to bucket uploader.upload_gallery('s3://testbucket/path/',", "# Test upload to bucket without prefix uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with(", "returncode=0) with TempDirectory() as tempdir: # Setup mock file and", "# Setup mock file and uploader tempdir.write('index.html', b'') gallery_path =", "upload to bucket without prefix uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3',", "import mock import os import subprocess from testfixtures import TempDirectory", "from unittest import mock import os import subprocess from testfixtures", "uploader.upload_gallery('s3://testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])", "Test upload to bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path) subprocess_run.assert_called_with( 
['aws', 's3', 'sync',", "to bucket without prefix uploader.upload_gallery('testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync',", "= subprocess.CompletedProcess([], returncode=0) with TempDirectory() as tempdir: # Setup mock", "gallery_path = os.path.join(tempdir.path, 'index.html') uploader = get_uploader('aws') # Test upload", "testfixtures import TempDirectory from simplegallery.upload.uploader_factory import get_uploader class AWSUploaderTestCase(unittest.TestCase): def", "'s3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) if __name__ == '__main__':", "and uploader tempdir.write('index.html', b'') gallery_path = os.path.join(tempdir.path, 'index.html') uploader =", "upload to bucket uploader.upload_gallery('s3://testbucket/path/', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path,", "['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) if __name__ ==", "unittest from unittest import mock import os import subprocess from", "uploader.upload_gallery('s3://testbucket/path', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])", "= os.path.join(tempdir.path, 'index.html') uploader = get_uploader('aws') # Test upload to", "tempdir.write('index.html', b'') gallery_path = os.path.join(tempdir.path, 'index.html') uploader = get_uploader('aws') #", "/ uploader.upload_gallery('s3://testbucket/path', gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude',", "gallery_path) subprocess_run.assert_called_with( ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store']) #" ]
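All three assertions expect the identical normalized target URL, so the uploader evidently canonicalizes its location argument (adding the s3:// scheme and a trailing slash) before invoking the AWS CLI. The following is only a sketch of that normalization as implied by the tests, not the actual simplegallery implementation:

import subprocess

def upload_gallery_sketch(location, gallery_path):
    # Prepend the scheme if missing and ensure a trailing slash, then
    # mirror the gallery to the bucket with the AWS CLI.
    if not location.startswith('s3://'):
        location = 's3://' + location
    if not location.endswith('/'):
        location += '/'
    subprocess.run(['aws', 's3', 'sync', gallery_path, location,
                    '--exclude', '.DS_Store'])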
[ "self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None))", "file will be lost! from PyQt4 import QtCore, QtGui try:", "81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font)", "QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290,", "= QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "= QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject() ui.setupUi(Dialog_add_subject) Dialog_add_subject.show()", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year", "290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font)", "# # Created by: PyQt4 UI code generator 4.11.4 #", "_fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try:", "91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")),", "Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81,", "implementation generated from reading ui file 'add_subject.ui' # # Created", "UI code generator 4.11.4 # # WARNING! 
All changes made", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon()", "QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190,", "app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject() ui.setupUi(Dialog_add_subject)", "import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def", "self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font =", "return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig):", "New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off)", "<reponame>kithsirij/NLP-based-Syllabus-Coverage-Exam-paper-checker-Tool # -*- coding: utf-8 -*- # Form implementation generated", "Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font)", "QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\",", "None)) if __name__ == \"__main__\": import sys app = QtGui.QApplication(sys.argv)", "self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\",", "-*- # Form implementation generated from reading ui file 'add_subject.ui'", "New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30,", "Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name =", "131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font)", "font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31))", "self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111,", 
"self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font =", "self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31))", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject =", "self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111,", "Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name", "self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font", "lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8", "QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject() ui.setupUi(Dialog_add_subject) Dialog_add_subject.show() sys.exit(app.exec_())", "self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel =", "utf-8 -*- # Form implementation generated from reading ui file", "self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\":", "disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font", "text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374)", "QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2)", "Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111,", "20)) 
self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font", "= QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year", "QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year =", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year =", "self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if", "AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def", "font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self,", "Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190,", "self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\",", "_encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text,", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\"))", "151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True)", "4.11.4 # # WARNING! 
All changes made in this file", "New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290,", "30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14)", "self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31))", "= QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50,", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save", "return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text,", "Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\",", "= QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig,", "file 'add_subject.ui' # # Created by: PyQt4 UI code generator", "disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context,", "= QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "__name__ == \"__main__\": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject =", "QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\",", "QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "Created by: PyQt4 UI code generator 4.11.4 # # WARNING!", "def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\": import sys", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")),", "= QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding", 
"Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10)", "self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font", "font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31))", "== \"__main__\": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog()", "try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s", "\"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\",", "by: PyQt4 UI code generator 4.11.4 # # WARNING! All", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon()", "self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font =", "self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151,", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190,", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\"))", "self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont()", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\"))", "except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon", "icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131,", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) 
self.label_add_subject.setFont(font)", "ui file 'add_subject.ui' # # Created by: PyQt4 UI code", "New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160,", "def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context,", "font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31))", "290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10)", "PyQt4 UI code generator 4.11.4 # # WARNING! All changes", "Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111,", "def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object):", "QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None))", "sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject()", "self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font =", "111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font)", "374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)", "\"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None))", "# WARNING! 
All changes made in this file will be", "disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject):", "Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject)", "\"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__", "Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91,", "self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font", "Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont()", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\"))", "self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester", "self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont()", "self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont()", "font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21))", "font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230,", "self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "# Form implementation generated from reading ui file 'add_subject.ui' #", "font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31))", "= QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) 
font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester", "self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont()", "self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont()", "QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568,", "AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class", "Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20,", "generator 4.11.4 # # WARNING! All changes made in this", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 =", "'add_subject.ui' # # Created by: PyQt4 UI code generator 4.11.4", "_translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError:", "try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context,", "QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21))", "Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font =", "QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91,", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon =", "QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal,", "QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s):", "Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off)", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = 
QtGui.QLabel(Dialog_add_subject)", "changes made in this file will be lost! from PyQt4", "from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except", "= QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off)", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name =", "import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui =", "be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 =", "generated from reading ui file 'add_subject.ui' # # Created by:", "self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont()", "= QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "# Created by: PyQt4 UI code generator 4.11.4 # #", "text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self,", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220,", "_encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text,", "_translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def", "QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "= QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95,", "font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321,", "New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = 
QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165,", "reading ui file 'add_subject.ui' # # Created by: PyQt4 UI", "# # WARNING! All changes made in this file will", "self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font =", "\"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\": import", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190,", "self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont()", "321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font)", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")),", "text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def", "Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def", "self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20))", "21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\"))", "= QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340,", "in this file will be lost! 
from PyQt4 import QtCore,", "from reading ui file 'add_subject.ui' # # Created by: PyQt4", "font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon)", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject)", "Form implementation generated from reading ui file 'add_subject.ui' # #", "def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except", "= QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel", "class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font =", "PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError:", "def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None))", "self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font =", "SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\",", "this file will be lost! 
from PyQt4 import QtCore, QtGui", "None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\",", "self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont()", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50,", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear)", "111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font)", "QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 =", "95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name", "self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject):", "self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\",", "\"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None))", "if __name__ == \"__main__\": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject", "21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\"))", "QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1)", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New 
Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal,", "QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding)", "QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235,", "self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font", "= QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font)", "New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject)", "icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject)", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject)", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject)", "21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\"))", "self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save =", "code generator 4.11.4 # # WARNING! 
All changes made in", "self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75)", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon()", "icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\"))", "text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return", "return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\"))", "None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ ==", "disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context,", "= QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\"))", "icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject)", "QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig):", "Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50,", "20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font", "self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font", "icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject)", 
"self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT", "retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\",", "_fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text,", "All changes made in this file will be lost! from", "\"__main__\": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui", "NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\",", "QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding =", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester =", "coding: utf-8 -*- # Form implementation generated from reading ui", "\"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\",", "self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return", "self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont()", "self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20))", "self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\": import sys app", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel,", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1", "None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None))", "s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return", "# -*- coding: utf-8 -*- # Form implementation generated from", "except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig)", "font.setPointSize(10) 
self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20,", "None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None))", "111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font)", "self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font", "-*- coding: utf-8 -*- # Form implementation generated from reading", "made in this file will be lost! from PyQt4 import", "will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8", "230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font =", "self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal,", "WARNING! 
All changes made in this file will be lost!", "self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font =", "font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21))", "111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2", "None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\",", "self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\",", "\"CANCEL\", None)) if __name__ == \"__main__\": import sys app =", "QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject)", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")),", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester =", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject)" ]
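Because pyuic4 overwrites this module whenever add_subject.ui is regenerated, dialogs like this are usually wrapped in a hand-written subclass rather than edited in place. The sketch below is a minimal illustration of that pattern, assuming the generated Ui_Dialog_add_subject class above is in scope (e.g. imported from the generated module); AddSubjectDialog and on_save are hypothetical names, not part of the generated file.

import sys

from PyQt4 import QtGui


class AddSubjectDialog(QtGui.QDialog):
    """Thin wrapper around the generated Ui class; safe to edit by hand."""

    def __init__(self, parent=None):
        QtGui.QDialog.__init__(self, parent)
        # Build the widgets declared in the generated module onto this dialog.
        self.ui = Ui_Dialog_add_subject()
        self.ui.setupUi(self)
        # New-style signal connection, kept out of the generated file.
        self.ui.pushButton_save.clicked.connect(self.on_save)

    def on_save(self):
        # Hypothetical handler: collect the form values, then close the dialog.
        self.subject = self.ui.lineEdit_subject_name.text()
        self.year = self.ui.comboBox_year.currentText()
        self.semester = self.ui.comboBox_semester.currentText()
        self.accept()


if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    dialog = AddSubjectDialog()
    dialog.show()
    sys.exit(app.exec_())

Connecting pushButton_save.clicked in the subclass keeps the handler out of the generated code, so regenerating the .ui file never loses it.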
[ "test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE,", "OneTimeReceiver(object): \"\"\" Special receiver for handle the fact that test", "= 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object):", "3. Test runner calls syncdb for create default database. #", "# 4. Test runner execute our unit test code. pre_syncdb_receiver", "django.core import management from django.utils import six from shared_models import", "Test runner calls syncdb for create default database. # 4.", "= 0 self.call_args = None def __call__(self, signal, sender, **kwargs):", "PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args", "None def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter +", "**kwargs): self.call_counter = self.call_counter + 1 self.call_args = kwargs class", "connect receiver before test runner creates database. That is, sequence", "here and not in unit test code because we need", "before test runner creates database. That is, sequence of #", "django.test import TestCase from django.core import management from django.utils import", "= ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY", "# 1. Test runner imports this module. # 2. We", "= PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO())", "**kwargs): # Although test runner calls syncdb for several databases,", "not in unit test code because we need to #", "code because we need to # connect receiver before test", "signal, sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args =", "<gh_stars>1-10 from django.db.models import signals from django.test import TestCase from", "self.call_args = kwargs # we need to test only one", "django.db.models import signals from django.test import TestCase from django.core import", "database. That is, sequence of # actions would be: #", "handle the fact that test runner calls syncdb for several", "runner calls syncdb for several databases, # testing for only", "six from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity',", "= None def __call__(self, signal, sender, **kwargs): # Although test", "0 self.call_args = None def __call__(self, signal, sender, **kwargs): self.call_counter", "module. # 2. We connect receiver. # 3. Test runner", "class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args = None", "create default database. # 4. Test runner execute our unit", "SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0", "runner imports this module. # 2. We connect receiver. 
#", "== SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args = kwargs", "runner calls syncdb for several databases and several times for", "\"\"\" def __init__(self): self.call_counter = 0 self.call_args = None def", "sender=models) # We connect receiver here and not in unit", "the fact that test runner calls syncdb for several databases", "def __call__(self, signal, sender, **kwargs): # Although test runner calls", "for create default database. # 4. Test runner execute our", "r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'],", "# testing for only one of them is quite sufficient.", "if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args", "syncdb for several databases, # testing for only one of", "be: # # 1. Test runner imports this module. #", "self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special", "args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'],", "None def __call__(self, signal, sender, **kwargs): # Although test runner", "test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We", "fact that test runner calls syncdb for several databases and", "from django.core import management from django.utils import six from shared_models", "to # connect receiver before test runner creates database. That", "SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args = kwargs #", "= kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle the", "4. Test runner execute our unit test code. pre_syncdb_receiver =", "runner execute our unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver,", "one of them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE:", "database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1)", "SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver()", "this module. # 2. We connect receiver. # 3. Test", "= self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\"", "= 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter", "signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self):", "sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args = kwargs", "connect receiver here and not in unit test code because", "them. \"\"\" def __init__(self): self.call_counter = 0 self.call_args = None", "our unit test code. 
pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class", "\"\"\" Special receiver for handle the fact that test runner", "Special receiver for handle the fact that test runner calls", "self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'], SYNCDB_INTERACTIVE)", "kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args =", "False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args =", "def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter + 1", "We connect receiver here and not in unit test code", "several times for some of them. \"\"\" def __init__(self): self.call_counter", "would be: # # 1. Test runner imports this module.", "connect receiver. # 3. Test runner calls syncdb for create", "calls syncdb for several databases and several times for some", "management from django.utils import six from shared_models import models PRE_SYNCDB_ARGS", "syncdb for create default database. # 4. Test runner execute", "of them. \"\"\" def __init__(self): self.call_counter = 0 self.call_args =", "'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE", "several databases and several times for some of them. \"\"\"", "to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) #", "SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class", "class OneTimeReceiver(object): \"\"\" Special receiver for handle the fact that", "signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and not in", "unit test code because we need to # connect receiver", "self.call_counter = self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object):", "1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver for", "models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE =", "SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self):", "= kwargs # we need to test only one call", "# We connect receiver here and not in unit test", "= r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY)", "runner calls syncdb for create default database. # 4. Test", "calls syncdb for create default database. # 4. 
Test runner", "because we need to # connect receiver before test runner", "for several databases, # testing for only one of them", "1 self.call_args = kwargs # we need to test only", "of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and", "for several databases and several times for some of them.", "r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False,", "test runner calls syncdb for several databases and several times", "0 self.call_args = None def __call__(self, signal, sender, **kwargs): #", "from django.db.models import signals from django.test import TestCase from django.core", "= self.call_counter + 1 self.call_args = kwargs # we need", "class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r =", "verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args),", "['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY =", "code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self):", "1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter =", "= OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1)", "__init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal,", "call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here", "is, sequence of # actions would be: # # 1.", "'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def", "creates database. That is, sequence of # actions would be:", "pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter,", "that test runner calls syncdb for several databases and several", "kwargs # we need to test only one call of", "kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle the fact", "PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default'", "them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter =", "times for some of them. \"\"\" def __init__(self): self.call_counter =", "Test runner execute our unit test code. pre_syncdb_receiver = OneTimeReceiver()", "signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args =", "from django.utils import six from shared_models import models PRE_SYNCDB_ARGS =", "unit test code. 
pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase):", "PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args = None def", "for only one of them is quite sufficient. if kwargs['db']", "test code because we need to # connect receiver before", "receiver here and not in unit test code because we", "Although test runner calls syncdb for several databases, # testing", "# actions would be: # # 1. Test runner imports", "syncdb for several databases and several times for some of", "self.call_counter + 1 self.call_args = kwargs # we need to", "# # 1. Test runner imports this module. # 2.", "import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE", "self.call_args = None def __call__(self, signal, sender, **kwargs): self.call_counter =", "in unit test code because we need to # connect", "database. # 4. Test runner execute our unit test code.", "def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self,", "1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE,", "sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r", "import signals from django.test import TestCase from django.core import management", "That is, sequence of # actions would be: # #", "some of them. \"\"\" def __init__(self): self.call_counter = 0 self.call_args", "# 3. Test runner calls syncdb for create default database.", "we need to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver,", "need to # connect receiver before test runner creates database.", "imports this module. # 2. We connect receiver. # 3.", "default database. # 4. Test runner execute our unit test", "only one of them is quite sufficient. if kwargs['db'] ==", "2. We connect receiver. # 3. Test runner calls syncdb", "self.call_counter = self.call_counter + 1 self.call_args = kwargs # we", "and not in unit test code because we need to", "management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter,", "1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'], SYNCDB_INTERACTIVE) self.assertEqual(args['db'],", "testing for only one of them is quite sufficient. if", "execute our unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models)", "import TestCase from django.core import management from django.utils import six", "databases and several times for some of them. \"\"\" def", "self.call_counter = 0 self.call_args = None def __call__(self, signal, sender,", "__call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args", "need to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models)", "shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db']", "Test runner imports this module. # 2. 
We connect receiver.", "syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and not", "'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1", "for some of them. \"\"\" def __init__(self): self.call_counter = 0", "self.call_args = None def __call__(self, signal, sender, **kwargs): # Although", "calls syncdb for several databases, # testing for only one", "of them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter", "runner creates database. That is, sequence of # actions would", "actions would be: # # 1. Test runner imports this", "receiver. # 3. Test runner calls syncdb for create default", "from django.test import TestCase from django.core import management from django.utils", "self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'], SYNCDB_INTERACTIVE) self.assertEqual(args['db'], 'default')", "TestCase from django.core import management from django.utils import six from", "__call__(self, signal, sender, **kwargs): # Although test runner calls syncdb", "= None def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter", "We connect receiver. # 3. Test runner calls syncdb for", "test runner creates database. That is, sequence of # actions", "of # actions would be: # # 1. Test runner", "interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS))", "'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False", "one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver", "OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def", "and several times for some of them. \"\"\" def __init__(self):", "# Although test runner calls syncdb for several databases, #", "self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle", "receiver for handle the fact that test runner calls syncdb", "1. Test runner imports this module. # 2. We connect", "'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE =", "test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models)", "test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def", "stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models)", "signal, sender, **kwargs): # Although test runner calls syncdb for", "def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r,", "django.utils import six from shared_models import models PRE_SYNCDB_ARGS = ['app',", "quite sufficient. 
if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter +", "+ 1 self.call_args = kwargs # we need to test", "we need to # connect receiver before test runner creates", "import six from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models',", "is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter", "only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect", "for handle the fact that test runner calls syncdb for", "# connect receiver before test runner creates database. That is,", "sender, **kwargs): # Although test runner calls syncdb for several", "self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb',", "import management from django.utils import six from shared_models import models", "# 2. We connect receiver. # 3. Test runner calls", "# we need to test only one call of syncdb", "receiver before test runner creates database. That is, sequence of", "+ 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver", "several databases, # testing for only one of them is", "test runner calls syncdb for several databases, # testing for", "load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'],", "sequence of # actions would be: # # 1. Test", "def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY,", "sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1", "= False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args", "databases, # testing for only one of them is quite", "from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive',", "signals from django.test import TestCase from django.core import management from", "sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args" ]
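One detail worth noting in the module above: connecting the receiver at import time means that, in principle, a second registration could double-count signals. Django's signal API also accepts a dispatch_uid argument that makes registration idempotent; the fragment below is a sketch of that alternative guard, not part of the original test, and my_receiver is a placeholder name.

from django.db.models import signals

from shared_models import models


def my_receiver(signal, sender, **kwargs):
    # Placeholder receiver body.
    pass


# With dispatch_uid, repeated connect() calls carrying the same uid register
# the receiver only once, so duplicate registration cannot inflate counters.
signals.pre_syncdb.connect(my_receiver, sender=models,
                           dispatch_uid="syncdb_signals.my_receiver")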
[ "\"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return", "last 4 numbers in the job id as the id", "if default_port: # use the last 4 numbers in the", "os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4 numbers in", "all ports should be in the 10k+ range default_port =", "2.0 (the \"License\"); # you may not use this file", "os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) ->", "size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world", "None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is not", "use the last 4 numbers in the job id as", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "@property def creates_processes_externally(self) -> bool: return True @staticmethod def detect()", "= root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in", "slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else:", "int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank", "world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) ->", "in the job id as the id default_port = default_port[-4:]", "under the License. import logging import os import re from", "----------------------- # in case the user passed it in if", "@staticmethod def detect() -> bool: \"\"\"Returns ``True`` if the current", "\"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number =", "setting global rank is not allowed. Ignored.\") def local_rank(self) ->", "= re.sub(\"[^0-9]\", \"\", number) root_node = name + number return", "use this file except in compliance with the License. #", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "= int(default_port) + 15000 else: default_port = 12910 # -----------------------", "License. # You may obtain a copy of the License", "Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) ->", "return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self,", "= self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property", "creates_processes_externally(self) -> bool: return True @staticmethod def detect() -> bool:", "{os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int: # -----------------------", "id as the id default_port = default_port[-4:] # all ports", "under the License is distributed on an \"AS IS\" BASIS,", "os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def", "License for the specific language governing permissions and # limitations", "= os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port)", "rank is not allowed. 
Ignored.\") def local_rank(self) -> int: return", "return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was", "was called, but setting global rank is not allowed. Ignored.\")", "= \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\")", "the 10k+ range default_port = int(default_port) + 15000 else: default_port", "was called, but setting world size is not allowed. Ignored.\")", "-> str: if \"[\" in root_node: name, numbers = root_node.split(\"[\",", "# ----------------------- # this way every process knows what port", "addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0]", "root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0]", "in os.environ @property def main_address(self) -> str: # figure out", "= os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node", "for training on a cluster managed by SLURM.\"\"\" @property def", "# in case the user passed it in if \"MASTER_PORT\"", "str: # figure out the root node addr slurm_nodelist =", "int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if", "\"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str: # figure", "in compliance with the License. # You may obtain a", "os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\")", "software # distributed under the License is distributed on an", "if \"-\" in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\",", "by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True @staticmethod", "= slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node)", "re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment):", "os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node =", "def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but", "SLURM JOB = PORT number # ----------------------- # this way", "in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"]", "# ----------------------- # SLURM JOB = PORT number # -----------------------", "launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property", "slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"]", "import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class", "cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return", "job id as the id default_port = default_port[-4:] # all", "pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): 
\"\"\"Cluster environment", "setting world size is not allowed. Ignored.\") def global_rank(self) ->", "in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\",", "else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self)", "but setting world size is not allowed. Ignored.\") def global_rank(self)", "10k+ range default_port = int(default_port) + 15000 else: default_port =", "be in the 10k+ range default_port = int(default_port) + 15000", "os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self)", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True", "use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last", "not allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def", "the License. import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "# use the last 4 numbers in the job id", "passed it in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"]", "to in writing, software # distributed under the License is", "default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return", "numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0] number", "# See the License for the specific language governing permissions", "or agreed to in writing, software # distributed under the", "required by applicable law or agreed to in writing, software", "current process was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\"", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "# figure out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\")", "with the License. # You may obtain a copy of", "log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on", "on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def", "SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster managed by", "maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0] number =", "figure out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if", "os.environ @property def main_address(self) -> str: # figure out the", "int: # ----------------------- # SLURM JOB = PORT number #", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "= MASTER_PORT # ----------------------- # in case the user passed", "global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) ->", "return True @staticmethod def detect() -> bool: \"\"\"Returns ``True`` if", "distributed under the License is distributed on an \"AS IS\"", "-> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"])", "in case the user passed it in if \"MASTER_PORT\" in", "-> bool: \"\"\"Returns ``True`` if the current process was launched", "Copyright The PyTorch Lightning team. # # Licensed under the", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\"", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node = name +", "not use this file except in compliance with the License.", "writing, software # distributed under the License is distributed on", "you may not use this file except in compliance with", "bool: return True @staticmethod def detect() -> bool: \"\"\"Returns ``True``", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\"", "to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the", "slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node", "rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global", "return \"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str: #", "PyTorch Lightning team. # # Licensed under the Apache License,", "local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return", "CONDITIONS OF ANY KIND, either express or implied. # See", "port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use", "the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "default_port = 12910 # ----------------------- # PORT NUMBER = MASTER_PORT", "in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number)", "= 12910 # ----------------------- # PORT NUMBER = MASTER_PORT #", "allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self)", "root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node", "def main_port(self) -> int: # ----------------------- # SLURM JOB =", "id default_port = default_port[-4:] # all ports should be in", "# limitations under the License. import logging import os import", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True @staticmethod def", "process was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in", "what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: #", "the License is distributed on an \"AS IS\" BASIS, #", "root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number:", "logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster", "governing permissions and # limitations under the License. import logging", "import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for", "if \"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number", "-> int: # ----------------------- # SLURM JOB = PORT number", "log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"])", "law or agreed to in writing, software # distributed under", "process knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if", "int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size", "str: if \"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1)", "= str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int:", "# this way every process knows what port to use", "def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str)", "if the current process was launched on a SLURM cluster.\"\"\"", "may obtain a copy of the License at # #", "----------------------- # this way every process knows what port to", "None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is not", "the job id as the id default_port = default_port[-4:] #", "return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if \"[\"", "= default_port[-4:] # all ports should be in the 10k+", "str) -> str: if \"[\" in root_node: name, numbers =", "return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was", "= numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0]", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def", "int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if \"[\" in", "may not use this file except in compliance with the", "the last 4 numbers in the job id as the", "SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self) ->", "range default_port = int(default_port) + 15000 else: default_port = 12910", "bool: \"\"\"Returns ``True`` if the current process was launched on", "is not allowed. 
Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"])", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "PORT NUMBER = MASTER_PORT # ----------------------- # in case the", "= root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) ->", "root_node: str) -> str: if \"[\" in root_node: name, numbers", "this file except in compliance with the License. # You", "\"\"\"Returns ``True`` if the current process was launched on a", "but setting global rank is not allowed. Ignored.\") def local_rank(self)", "the current process was launched on a SLURM cluster.\"\"\" return", "world size is not allowed. Ignored.\") def global_rank(self) -> int:", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "Lightning team. # # Licensed under the Apache License, Version", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "@property def main_port(self) -> int: # ----------------------- # SLURM JOB", "= number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node = name", "@property def main_address(self) -> str: # figure out the root", "limitations under the License. import logging import os import re", "case the user passed it in if \"MASTER_PORT\" in os.environ:", "global rank is not allowed. Ignored.\") def local_rank(self) -> int:", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist:", "Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank:", "team. # # Licensed under the Apache License, Version 2.0", "License. import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import", "ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training", "\"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port)", "# ----------------------- # in case the user passed it in", "language governing permissions and # limitations under the License. import", "user passed it in if \"MASTER_PORT\" in os.environ: default_port =", "# PORT NUMBER = MASTER_PORT # ----------------------- # in case", "root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node =", "root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int:", "is not allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"])", "else: default_port = 12910 # ----------------------- # PORT NUMBER =", "the id default_port = default_port[-4:] # all ports should be", "resolve_root_node_address(self, root_node: str) -> str: if \"[\" in root_node: name,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number", "or implied. # See the License for the specific language", "KIND, either express or implied. 
# See the License for", "specific language governing permissions and # limitations under the License.", "set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting", "way every process knows what port to use default_port =", "NUMBER = MASTER_PORT # ----------------------- # in case the user", "root_node @property def main_port(self) -> int: # ----------------------- # SLURM", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "MASTER_PORT # ----------------------- # in case the user passed it", "from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster", "import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment", "knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port:", "15000 else: default_port = 12910 # ----------------------- # PORT NUMBER", "(the \"License\"); # you may not use this file except", "-> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is", "# you may not use this file except in compliance", "number = re.sub(\"[^0-9]\", \"\", number) root_node = name + number", "number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number =", "cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str:", "in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT:", "default_port: # use the last 4 numbers in the job", "on a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) ->", "\"\"\"Cluster environment for training on a cluster managed by SLURM.\"\"\"", "node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) ->", "# Copyright The PyTorch Lightning team. # # Licensed under", "numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\"", "# # Unless required by applicable law or agreed to", "-> bool: return True @staticmethod def detect() -> bool: \"\"\"Returns", "+ 15000 else: default_port = 12910 # ----------------------- # PORT", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "default_port = int(default_port) + 15000 else: default_port = 12910 #", "Version 2.0 (the \"License\"); # you may not use this", "\"-\" in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\",", "else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node", "called, but setting world size is not allowed. Ignored.\") def", "log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is not allowed.", "if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] =", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int:", "int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called,", "in the 10k+ range default_port = int(default_port) + 15000 else:", "it in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else:", "def main_address(self) -> str: # figure out the root node", "numbers in the job id as the id default_port =", "by applicable law or agreed to in writing, software #", "should be in the 10k+ range default_port = int(default_port) +", "-> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str:", "import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log =", "a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool:", "allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self,", "# all ports should be in the 10k+ range default_port", "size is not allowed. Ignored.\") def global_rank(self) -> int: return", "``True`` if the current process was launched on a SLURM", "log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int: #", "def creates_processes_externally(self) -> bool: return True @staticmethod def detect() ->", "re.sub(\"[^0-9]\", \"\", number) root_node = name + number return root_node", "int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def", "str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "def detect() -> bool: \"\"\"Returns ``True`` if the current process", "a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self)", "int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called,", "Unless required by applicable law or agreed to in writing,", "= PORT number # ----------------------- # this way every process", "def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int)", "----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- # in", "-> str: # figure out the root node addr slurm_nodelist", "the specific language governing permissions and # limitations under the", "environment for training on a cluster managed by SLURM.\"\"\" @property", "int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size:", "def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int)", "applicable law or agreed to in writing, software # distributed", "----------------------- # SLURM JOB = PORT number # ----------------------- #", "in writing, software # distributed under the License is distributed", "main_port(self) -> int: # ----------------------- # SLURM JOB = PORT", "and # limitations under the License. 
import logging import os", "main_address(self) -> str: # figure out the root node addr", "every process knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\")", "\")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] =", "int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size", "JOB = PORT number # ----------------------- # this way every", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "License, Version 2.0 (the \"License\"); # you may not use", "number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node", "# You may obtain a copy of the License at", "default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "def resolve_root_node_address(self, root_node: str) -> str: if \"[\" in root_node:", "ports should be in the 10k+ range default_port = int(default_port)", "-> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is", "detect() -> bool: \"\"\"Returns ``True`` if the current process was", "-> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None:", "root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node =", "the License for the specific language governing permissions and #", "root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR:", "Apache License, Version 2.0 (the \"License\"); # you may not", "12910 # ----------------------- # PORT NUMBER = MASTER_PORT # -----------------------", "either express or implied. # See the License for the", "def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but", "set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting", "int(default_port) + 15000 else: default_port = 12910 # ----------------------- #", "as the id default_port = default_port[-4:] # all ports should", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "= os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4 numbers", "{os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def", "= logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a", "int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node:", "return root_node @property def main_port(self) -> int: # ----------------------- #", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "permissions and # limitations under the License. import logging import", "number # ----------------------- # this way every process knows what", "called, but setting global rank is not allowed. 
Ignored.\") def", "class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster managed", "default_port = default_port[-4:] # all ports should be in the", "\"License\"); # you may not use this file except in", "logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log", "number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node =", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "The PyTorch Lightning team. # # Licensed under the Apache", "4 numbers in the job id as the id default_port", "# distributed under the License is distributed on an \"AS", "the user passed it in if \"MASTER_PORT\" in os.environ: default_port", "# Unless required by applicable law or agreed to in", "training on a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self)", "# SLURM JOB = PORT number # ----------------------- # this", "name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if", "PORT number # ----------------------- # this way every process knows", "return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "this way every process knows what port to use default_port", "# ----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- #", "int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank", "not allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def", "You may obtain a copy of the License at #", "was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ", "True @staticmethod def detect() -> bool: \"\"\"Returns ``True`` if the", "os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__)", "default_port[-4:] # all ports should be in the 10k+ range", "the Apache License, Version 2.0 (the \"License\"); # you may", "log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is not allowed.", "-> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None:" ]
<filename>examples/mouse.py
#!/usr/bin/env python
import time
import os
import math

from trackball import TrackBall

print("""Trackball: Mouse

Use the trackball as a mouse in Raspbian, with right-click
when the switch is pressed.

Press Ctrl+C to exit!
""")

trackball = TrackBall(interrupt_pin=4)
trackball.set_rgbw(0, 0, 0, 0)

# Check for xte (used to control mouse)
use_xte = os.system('which xte') == 0
if not use_xte:
    raise RuntimeError("xte not found. Did you sudo apt install xautomation?")

while True:
    up, down, left, right, switch, state = trackball.read()

    # Send movements and clicks to xte
    if switch:
        cmd = 'xte "mouseclick 1"'
        os.system(cmd)
    elif right or up or left or down:
        # Square each delta while keeping its sign, for pointer acceleration
        x = right - left
        x = math.copysign(x**2, x)
        y = down - up
        y = math.copysign(y**2, y)

        cmd = 'xte "mousermove {} {}"'.format(int(x), int(y))
        os.system(cmd)

    time.sleep(0.0001)
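The copysign/square step above is a simple pointer-acceleration curve: squaring each delta while preserving its sign leaves slow nudges almost untouched but makes fast flicks travel much further. A standalone sketch with made-up sample deltas:

# Acceleration-curve sketch (illustrative; the sample deltas are made up).
import math

def accelerate(delta):
    # Square the magnitude, preserve the direction.
    return math.copysign(delta ** 2, delta)

for delta in (-5, -2, 0, 2, 5):
    print(delta, '->', int(accelerate(delta)))
# -5 -> -25, -2 -> -4, 0 -> 0, 2 -> 4, 5 -> 25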
<filename>garaged/src/garage/tf/regressors/gaussian_mlp_regressor_model.py
"""GaussianMLPRegressorModel."""
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp

from garage.experiment import deterministic
from garage.tf.models import GaussianMLPModel


class GaussianMLPRegressorModel(GaussianMLPModel):
    """GaussianMLPRegressor based on garage.tf.models.Model class.

    This class can be used to perform regression by fitting a Gaussian
    distribution to the outputs.

    Args:
        input_shape (tuple[int]): Input shape of the training data.
        output_dim (int): Output dimension of the model.
        name (str): Model name, also the variable scope.
        hidden_sizes (list[int]): Output dimension of dense layer(s) for
            the MLP for mean. For example, (32, 32) means the MLP consists
            of two hidden layers, each with 32 hidden units.
        hidden_nonlinearity (callable): Activation function for intermediate
            dense layer(s). It should return a tf.Tensor. Set it to None to
            maintain a linear activation.
        hidden_w_init (callable): Initializer function for the weight of
            intermediate dense layer(s). The function should return a
            tf.Tensor.
        hidden_b_init (callable): Initializer function for the bias of
            intermediate dense layer(s). The function should return a
            tf.Tensor.
        output_nonlinearity (callable): Activation function for output dense
            layer. It should return a tf.Tensor. Set it to None to maintain
            a linear activation.
        output_w_init (callable): Initializer function for the weight of
            output dense layer(s). The function should return a tf.Tensor.
        output_b_init (callable): Initializer function for the bias of
            output dense layer(s). The function should return a tf.Tensor.
        learn_std (bool): Is std trainable.
        init_std (float): Initial value for std.
        adaptive_std (bool): Is std a neural network. If False, it will be a
            parameter.
        std_share_network (bool): Boolean for whether mean and std share the
            same network.
        std_hidden_sizes (list[int]): Output dimension of dense layer(s) for
            the MLP for std. For example, (32, 32) means the MLP consists
            of two hidden layers, each with 32 hidden units.
        min_std (float): If not None, the std is at least the value of
            min_std, to avoid numerical issues.
        max_std (float): If not None, the std is at most the value of
            max_std, to avoid numerical issues.
        std_hidden_nonlinearity (callable): Nonlinearity for each hidden
            layer in the std network.
        std_hidden_w_init (callable): Initializer function for the weight of
            intermediate dense layer(s) in the std network.
        std_hidden_b_init (callable): Initializer function for the bias of
            intermediate dense layer(s) in the std network.
        std_output_nonlinearity (callable): Activation function for output
            dense layer in the std network. It should return a tf.Tensor.
            Set it to None to maintain a linear activation.
        std_output_w_init (callable): Initializer function for the weight of
            output dense layer(s) in the std network.
        std_parameterization (str): How the std should be parametrized. There
            are two options:
            - exp: the logarithm of the std will be stored, and applied an
                exponential transformation
            - softplus: the std will be computed as log(1+exp(x))
        layer_normalization (bool): Bool for using layer normalization or
            not.

    """

    def __init__(self,
                 input_shape,
                 output_dim,
                 name='GaussianMLPRegressorModel',
                 hidden_sizes=(32, 32),
                 hidden_nonlinearity=tf.nn.tanh,
                 hidden_w_init=tf.initializers.glorot_uniform(
                     seed=deterministic.get_tf_seed_stream()),
                 hidden_b_init=tf.zeros_initializer(),
                 output_nonlinearity=None,
                 output_w_init=tf.initializers.glorot_uniform(
                     seed=deterministic.get_tf_seed_stream()),
                 output_b_init=tf.zeros_initializer(),
                 learn_std=True,
                 adaptive_std=False,
                 std_share_network=False,
                 init_std=1.0,
                 min_std=1e-6,
                 max_std=None,
                 std_hidden_sizes=(32, 32),
                 std_hidden_nonlinearity=tf.nn.tanh,
                 std_hidden_w_init=tf.initializers.glorot_uniform(
                     seed=deterministic.get_tf_seed_stream()),
                 std_hidden_b_init=tf.zeros_initializer(),
                 std_output_nonlinearity=None,
                 std_output_w_init=tf.initializers.glorot_uniform(
                     seed=deterministic.get_tf_seed_stream()),
                 std_parameterization='exp',
                 layer_normalization=False):
        super().__init__(output_dim=output_dim,
                         name=name,
                         hidden_sizes=hidden_sizes,
                         hidden_nonlinearity=hidden_nonlinearity,
                         hidden_w_init=hidden_w_init,
                         hidden_b_init=hidden_b_init,
                         output_nonlinearity=output_nonlinearity,
                         output_w_init=output_w_init,
                         output_b_init=output_b_init,
                         learn_std=learn_std,
                         adaptive_std=adaptive_std,
                         std_share_network=std_share_network,
                         init_std=init_std,
                         min_std=min_std,
                         max_std=max_std,
                         std_hidden_sizes=std_hidden_sizes,
                         std_hidden_nonlinearity=std_hidden_nonlinearity,
                         std_output_nonlinearity=std_output_nonlinearity,
                         std_parameterization=std_parameterization,
                         layer_normalization=layer_normalization)
        self._input_shape = input_shape

    def network_output_spec(self):
        """Network output spec.

        Return:
            list[str]: List of key(str) for the network outputs.

        """
        return [
            'normalized_dist', 'normalized_mean', 'normalized_log_std',
            'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std'
        ]

    def _build(self, state_input, name=None):
        """Build model given input placeholder(s).

        Args:
            state_input (tf.Tensor): Placeholder for state input.
            name (str): Inner model name, also the variable scope of the
                inner model, if it exists. One example is
                garage.tf.models.Sequential.

        Return:
            tfp.distributions.MultivariateNormalDiag: Normalized distribution.
            tf.Tensor: Normalized mean.
            tf.Tensor: Normalized log_std.
            tfp.distributions.MultivariateNormalDiag: Vanilla distribution.
            tf.Tensor: Vanilla mean.
            tf.Tensor: Vanilla log_std.
            tf.Tensor: Mean for data.
            tf.Tensor: log_std for data.
            tf.Tensor: Mean for label.
            tf.Tensor: log_std for label.

        """
        with tf.compat.v1.variable_scope('normalized_vars'):
            x_mean_var = tf.compat.v1.get_variable(
                name='x_mean',
                shape=(1, ) + self._input_shape,
                dtype=np.float32,
                initializer=tf.zeros_initializer(),
                trainable=False)
            x_std_var = tf.compat.v1.get_variable(
                name='x_std_var',
                shape=(1, ) + self._input_shape,
                dtype=np.float32,
                initializer=tf.ones_initializer(),
                trainable=False)
            y_mean_var = tf.compat.v1.get_variable(
                name='y_mean_var',
                shape=(1, self._output_dim),
                dtype=np.float32,
                initializer=tf.zeros_initializer(),
                trainable=False)
            y_std_var = tf.compat.v1.get_variable(
                name='y_std_var',
                shape=(1, self._output_dim),
                dtype=np.float32,
                initializer=tf.ones_initializer(),
                trainable=False)

        normalized_xs_var = (state_input - x_mean_var) / x_std_var

        _, normalized_dist_mean, normalized_dist_log_std = super()._build(
            normalized_xs_var)

        # Since regressor expects [N, *dims], we need to squeeze the extra
        # dimension
        normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1)

        with tf.name_scope('mean_network'):
            means_var = normalized_dist_mean * y_std_var + y_mean_var

        with tf.name_scope('std_network'):
            log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var)

        normalized_dist = tfp.distributions.MultivariateNormalDiag(
            loc=normalized_dist_mean,
            scale_diag=tf.exp(normalized_dist_log_std))

        vanilla_dist = tfp.distributions.MultivariateNormalDiag(
            loc=means_var, scale_diag=tf.exp(log_stds_var))

        return (normalized_dist, normalized_dist_mean,
                normalized_dist_log_std, vanilla_dist, means_var,
                log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var)

    def clone(self, name):
        """Return a clone of the model.

        It copies the configuration and parameters of the primitive.

        Args:
            name (str): Name of the newly created model. It has to be
                different from source model if cloned under the same
                computational graph.

        Returns:
            garage.tf.regressors.GaussianMLPRegressorModel: Newly cloned
                model.

        """
        new_regressor = self.__class__(
            name=name,
            input_shape=self._input_shape,
            output_dim=self._output_dim,
            hidden_sizes=self._hidden_sizes,
            hidden_nonlinearity=self._hidden_nonlinearity,
            hidden_w_init=self._hidden_w_init,
            hidden_b_init=self._hidden_b_init,
            output_nonlinearity=self._output_nonlinearity,
            output_w_init=self._output_w_init,
            output_b_init=self._output_b_init,
            learn_std=self._learn_std,
            adaptive_std=self._adaptive_std,
            std_share_network=self._std_share_network,
            init_std=self._init_std,
            min_std=self._min_std,
            max_std=self._max_std,
            std_hidden_sizes=self._std_hidden_sizes,
            std_hidden_nonlinearity=self._std_hidden_nonlinearity,
            std_hidden_w_init=self._std_hidden_w_init,
            std_hidden_b_init=self._std_hidden_b_init,
            std_output_nonlinearity=self._std_output_nonlinearity,
            std_output_w_init=self._std_output_w_init,
            std_parameterization=self._std_parameterization,
            layer_normalization=self._layer_normalization)
        new_regressor.parameters = self.parameters
        return new_regressor
The function should return", "(float): Initial value for std. adaptive_std (bool): Is std a", "or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32),", "model. It copies the configuration and parameters of the primitive.", "# Since regressor expects [N, *dims], we need to squeeze", "name (str): Name of the newly created model. It has", "network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean',", "if cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly", "a linear activation. output_w_init (callable): Initializer function for the weight", "numerical issues. max_std (float): If not None, the std is", "state_input (tf.Tensor): Place holder for state input. name (str): Inner", "dimension of dense layer(s) for the MLP for std. For", "Args: name (str): Name of the newly created model. It", "output dense layer(s). The function should return a tf.Tensor. output_b_init", "the bias of output dense layer(s). The function should return", "std_parameterization (str): How the std should be parametrized. There are", "consists of two hidden layers, each with 32 hidden units.", "Activation function for output dense layer in the std network.", "it to None to maintain a linear activation. output_w_init (callable):", "of intermediate dense layer(s) in the std network. std_hidden_b_init (callable):", "weight of output dense layer(s) in the std network. std_parameterization", "'y_mean', 'y_std' ] def _build(self, state_input, name=None): \"\"\"Build model given", "model name, also the variable scope of the inner model,", "dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) +", "to the outputs. Args: input_shape (tuple[int]): Input shape of the", "for intermediate dense layer(s). It should return a tf.Tensor. Set", "If not None, the std is at least the value", "we need to squeeze the extra # dimension normalized_dist_log_std =", "with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var with", "hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense layer(s).", "input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std,", "std_output_w_init (callable): Initializer function for the weight of output dense", "log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization or not.", "MLP consists of two hidden layers, each with 32 hidden", "GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class", "cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned", "Boolean for whether mean and std share the same network.", "outputs. 
\"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std',", "max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()),", "scope of the inner model, if exist. One example is", "should return a tf.Tensor. output_nonlinearity (callable): Activation function for output", "to be different from source model if cloned under the", "hidden_nonlinearity (callable): Activation function for intermediate dense layer(s). It should", "This class can be used to perform regression by fitting", "(callable): Initializer function for the bias of output dense layer(s).", "function for output dense layer. It should return a tf.Tensor.", "dense layer(s). It should return a tf.Tensor. Set it to", "loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var,", "function for the bias of intermediate dense layer(s) in the", "\"\"\"GaussianMLPRegressorModel.\"\"\" import numpy as np import tensorflow as tf import", "hidden layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation", "share the same network. std_hidden_sizes (list[int]): Output dimension of dense", "(normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var)", "tensorflow as tf import tensorflow_probability as tfp from garage.experiment import", "The function should return a tf.Tensor. output_nonlinearity (callable): Activation function", "name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var =", "maintain a linear activation. hidden_w_init (callable): Initializer function for the", "avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer", "Output dimension of dense layer(s) for the MLP for std.", "created model. It has to be different from source model", "be used to perform regression by fitting a Gaussian distribution", "using layer normalization or not. \"\"\" def __init__(self, input_shape, output_dim,", "numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in", "mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean.", "Place holder for state input. name (str): Inner model name,", "normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects [N, *dims],", "std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in the std", "Output dimension of dense layer(s) for the MLP for mean.", "dense layer(s) for the MLP for mean. For example, (32,", "dense layer(s) in the std network. std_parameterization (str): How the", "std network. std_output_nonlinearity (callable): Activation function for output dense layer", "std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters = self.parameters return new_regressor", "model if cloned under the same computational graph. 
Returns: garage.tf.policies.GaussianMLPModel:", "super()._build( normalized_xs_var) # Since regressor expects [N, *dims], we need", "a linear activation. hidden_w_init (callable): Initializer function for the weight", "to maintain a linear activation. hidden_w_init (callable): Initializer function for", "has to be different from source model if cloned under", "means the MLP consists of two hidden layers, each with", "Initial value for std. adaptive_std (bool): Is std a neural", "whether mean and std share the same network. std_hidden_sizes (list[int]):", "bias of output dense layer(s). The function should return a", "need to squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std,", "layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init,", "initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _,", "32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True,", "(bool): Is std a neural network. If False, it will", "None, the std is at most the value of max_std,", "32) means the MLP consists of two hidden layers, each", "(callable): Initializer function for the weight of intermediate dense layer(s)", "dense layer(s). The function should return a tf.Tensor. hidden_b_init (callable):", "it to None to maintain a linear activation. std_output_w_init (callable):", "(callable): Activation function for intermediate dense layer(s). It should return", "be computed as log(1+exp(x)) layer_normalization (bool): Bool for using layer", "_build(self, state_input, name=None): \"\"\"Build model given input placeholder(s). Args: state_input", "Mean for data. tf.Tensor: log_std for data. tf.Tensor: Mean for", "hidden layers, each with 32 hidden units. min_std (float): If", "a clone of the model. It copies the configuration and", "network. If False, it will be a parameter. std_share_network (bool):", "linear activation. hidden_w_init (callable): Initializer function for the weight of", "under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model.", "learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization,", "should return a tf.Tensor. output_b_init (callable): Initializer function for the", "will be a parameter. 
std_share_network (bool): Boolean for whether mean", "garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor", "tf import tensorflow_probability as tfp from garage.experiment import deterministic from", "y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var", "the value of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable):", "Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data.", "normalization or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32,", "outputs. Args: input_shape (tuple[int]): Input shape of the training data.", "on garage.tf.models.Model class. This class can be used to perform", "32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False):", "How the std should be parametrized. There are two options:", "parameter. std_share_network (bool): Boolean for whether mean and std share", "layer(s). It should return a tf.Tensor. Set it to None", "function for the weight of intermediate dense layer(s) in the", "x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone", "issues. max_std (float): If not None, the std is at", "exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor:", "garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class.", "Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla", "to None to maintain a linear activation. hidden_w_init (callable): Initializer", "function for the weight of output dense layer(s). The function", "Is std a neural network. If False, it will be", "std a neural network. If False, it will be a", "std will be stored, and applied a exponential transformation -", "y_std_var) def clone(self, name): \"\"\"Return a clone of the model.", "not None, the std is at most the value of", "regression by fitting a Gaussian distribution to the outputs. Args:", "distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution.", "(str): Inner model name, also the variable scope of the", "layer in the std network. std_hidden_w_init (callable): Initializer function for", "If not None, the std is at most the value", "'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ]", "the inner model, if exist. One example is garage.tf.models.Sequential. Return:", "name (str): Model name, also the variable scope. hidden_sizes (list[int]):", "layer(s) for the MLP for mean. 
For example, (32, 32)", "to perform regression by fitting a Gaussian distribution to the", "= tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False)", "name (str): Inner model name, also the variable scope of", "network. std_hidden_w_init (callable): Initializer function for the weight of intermediate", "input placeholder(s). Args: state_input (tf.Tensor): Place holder for state input.", "squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with", "linear activation. std_output_w_init (callable): Initializer function for the weight of", "parameters of the primitive. Args: name (str): Name of the", "std_share_network (bool): Boolean for whether mean and std share the", "of output dense layer(s) in the std network. std_parameterization (str):", "should return a tf.Tensor. Set it to None to maintain", "avoid numerical issues. max_std (float): If not None, the std", "in the std network. std_parameterization (str): How the std should", "std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self):", "It copies the configuration and parameters of the primitive. Args:", "tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var,", "is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor:", ") + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var',", "of the model. It copies the configuration and parameters of", "source model if cloned under the same computational graph. Returns:", "min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform(", "applied a exponential transformation - softplus: the std will be", "units. min_std (float): If not None, the std is at", "newly created model. It has to be different from source", "std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters = self.parameters", "to None to maintain a linear activation. output_w_init (callable): Initializer", "tf.Tensor. output_nonlinearity (callable): Activation function for output dense layer. It", "import tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models", "'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None):", "clone of the model. It copies the configuration and parameters", "scope. 
hidden_sizes (list[int]): Output dimension of dense layer(s) for the", "hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std,", "Initializer function for the bias of intermediate dense layer(s) in", "intermediate dense layer(s). The function should return a tf.Tensor. hidden_b_init", "for mean. For example, (32, 32) means the MLP consists", "of the inner model, if exist. One example is garage.tf.models.Sequential.", "- x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var)", "of the newly created model. It has to be different", "def clone(self, name): \"\"\"Return a clone of the model. It", "output_dim (int): Output dimension of the model. name (str): Model", "self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init,", "shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable(", "variable scope. hidden_sizes (list[int]): Output dimension of dense layer(s) for", "(float): If not None, the std is at most the", "different from source model if cloned under the same computational", "tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var", "min_std (float): If not None, the std is at least", "std_hidden_b_init (callable): Initializer function for the bias of intermediate dense", "of key(str) for the network outputs. \"\"\" return [ 'normalized_dist',", "std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init,", "logarithm of the std will be stored, and applied a", "std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters =", "std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name,", "intermediate dense layer(s). The function should return a tf.Tensor. 
output_nonlinearity", "hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network,", "'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def", "intermediate dense layer(s) in the std network. std_output_nonlinearity (callable): Activation", "layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation function", "std is at most the value of max_std, to avoid", "the std network. std_hidden_b_init (callable): Initializer function for the bias", "std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output", "(callable): Initializer function for the weight of intermediate dense layer(s).", "layer(s). The function should return a tf.Tensor. output_b_init (callable): Initializer", "x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) #", "for data. tf.Tensor: log_std for data. tf.Tensor: Mean for label.", "the std network. std_output_nonlinearity (callable): Activation function for output dense", "tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag:", "output dense layer in the std network. It should return", "to None to maintain a linear activation. std_output_w_init (callable): Initializer", "activation. output_w_init (callable): Initializer function for the weight of output", "normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean *", "means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return", "clone(self, name): \"\"\"Return a clone of the model. It copies", "and std share the same network. std_hidden_sizes (list[int]): Output dimension", "\"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform(", "model. It has to be different from source model if", "Inner model name, also the variable scope of the inner", "numpy as np import tensorflow as tf import tensorflow_probability as", "the bias of intermediate dense layer(s) in the std network.", "= normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var =", "be a parameter. std_share_network (bool): Boolean for whether mean and", "tf.Tensor: log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std", "the configuration and parameters of the primitive. Args: name (str):", "tf.Tensor. learn_std (bool): Is std trainable. init_std (float): Initial value", "holder for state input. name (str): Inner model name, also", "tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data.", "with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape,", "the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\"", "data. output_dim (int): Output dimension of the model. 
name (str):", "is at most the value of max_std, to avoid numerical", "the same network. std_hidden_sizes (list[int]): Output dimension of dense layer(s)", "std_output_nonlinearity (callable): Activation function for output dense layer in the", "name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input -", "function for the bias of intermediate dense layer(s). The function", "trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _, normalized_dist_mean,", "for the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std',", "[ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean',", "Activation function for output dense layer. It should return a", "also the variable scope. hidden_sizes (list[int]): Output dimension of dense", "it to None to maintain a linear activation. hidden_w_init (callable):", "value of min_std, to avoid numerical issues. max_std (float): If", "= normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist", "tf.Tensor. output_b_init (callable): Initializer function for the bias of output", "to maintain a linear activation. output_w_init (callable): Initializer function for", "std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity,", "for the bias of output dense layer(s). The function should", "hidden units. min_std (float): If not None, the std is", "std should be parametrized. There are two options: - exp:", "Initializer function for the weight of output dense layer(s) in", "*dims], we need to squeeze the extra # dimension normalized_dist_log_std", "the std network. std_parameterization (str): How the std should be", "tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor:", "hidden_w_init (callable): Initializer function for the weight of intermediate dense", "dense layer(s) in the std network. std_output_nonlinearity (callable): Activation function", "seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes,", "normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist =", "hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP", "(float): If not None, the std is at least the", "dense layer. It should return a tf.Tensor. Set it to", "linear activation. 
output_w_init (callable): Initializer function for the weight of", "std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape", "of dense layer(s) for the MLP for mean. For example,", "output dense layer(s) in the std network. std_parameterization (str): How", "key(str) for the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean',", "copies the configuration and parameters of the primitive. Args: name", "function for the bias of output dense layer(s). The function", "in the std network. std_hidden_b_init (callable): Initializer function for the", "the variable scope of the inner model, if exist. One", "+ y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist", "init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape =", "x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone of", "of the primitive. Args: name (str): Name of the newly", "initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(),", "cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes,", "it will be a parameter. std_share_network (bool): Boolean for whether", "as tf import tensorflow_probability as tfp from garage.experiment import deterministic", "32 hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense", "Newly cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim,", "The function should return a tf.Tensor. learn_std (bool): Is std", "from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel):", "Gaussian distribution to the outputs. Args: input_shape (tuple[int]): Input shape", "value for std. adaptive_std (bool): Is std a neural network.", "\"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init,", "def _build(self, state_input, name=None): \"\"\"Build model given input placeholder(s). Args:", "for the bias of intermediate dense layer(s) in the std", "std trainable. init_std (float): Initial value for std. adaptive_std (bool):", "network. std_output_nonlinearity (callable): Activation function for output dense layer in", "+ tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag(", "and parameters of the primitive. Args: name (str): Name of", "mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. 
tf.Tensor: log_std", "new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init,", "the model. name (str): Model name, also the variable scope.", "learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init,", "Set it to None to maintain a linear activation. hidden_w_init", "graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__(", "hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std,", "hidden_b_init (callable): Initializer function for the bias of intermediate dense", "the outputs. Args: input_shape (tuple[int]): Input shape of the training", "For example, (32, 32) means the MLP consists of two", "import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based", "return a tf.Tensor. output_b_init (callable): Initializer function for the bias", "garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape,", "output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None,", "tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor:", "in the std network. std_hidden_w_init (callable): Initializer function for the", "std is at least the value of min_std, to avoid", "def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()),", "the std network. std_hidden_w_init (callable): Initializer function for the weight", "Initializer function for the weight of intermediate dense layer(s) in", "bias of intermediate dense layer(s). The function should return a", "fitting a Gaussian distribution to the outputs. Args: input_shape (tuple[int]):", "same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor", "log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std for", "training data. output_dim (int): Output dimension of the model. 
name", "the logarithm of the std will be stored, and applied", "hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std,", "log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std))", "Initializer function for the bias of output dense layer(s). The", "seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0,", "log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data. tf.Tensor:", "input. name (str): Inner model name, also the variable scope", "Since regressor expects [N, *dims], we need to squeeze the", "False, it will be a parameter. std_share_network (bool): Boolean for", "for the weight of output dense layer(s) in the std", "distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for", "Activation function for intermediate dense layer(s). It should return a", "will be computed as log(1+exp(x)) layer_normalization (bool): Bool for using", "std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(),", "output_w_init (callable): Initializer function for the weight of output dense", "tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models import" ]
[ "= datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True,", "True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res", "test_loss = 0 correct_1 = 0 correct_5 = 0 total", "len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total),", "= nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)):", "k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _,", "= torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint =", "+= prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f |", "transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir,", "import torch.optim as optim import torch.nn.functional as F import torch.backends.cudnn", "F import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as", "as optim import torch.nn.functional as F import torch.backends.cudnn as cudnn", "import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper", "net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if", "pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk: correct_k", "[] for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k)", "datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30)", "checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net =", "outputs = loss else: loss_value = loss test_loss += loss_value.item()", "else: loss_value = loss test_loss += loss_value.item() prec1, prec5 =", "of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0)", "0 total = 0 for batch_idx, (inputs, targets) in enumerate(testloader):", "k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res", "input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net =", "= output.topk(maxk, 1, True, True) pred = pred.t() correct =", "prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc:", "= loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets,", "> 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device)", "import torchvision.datasets as datasets import os import argparse from torch.autograd", "parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir',", "imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input", "code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],", "if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net =", "correct_1 = 0 correct_5 = 0 total = 0 for", "inference') 
parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input model')", "= 0 correct_5 = 0 total = 0 for batch_idx,", "pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s' %", "loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total +=", "num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda =", "loading code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456,", "torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as transforms import", "0 correct_5 = 0 total = 0 for batch_idx, (inputs,", "accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1 += prec1", "+= prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)'", "(test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%,", "use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs =", "Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss", "= accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1 +=", "100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%, top-5", "#imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000,", "with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets) if", "% args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7')", "res = [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0,", "5)) total += targets.size(0) correct_1 += prec1 correct_5 += prec5", "parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args() # Data", "% (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy:", "total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%, top-5 accuracy: {1:.3f}%'.format(acc1,acc5))", "loss_value, outputs = loss else: loss_value = loss test_loss +=", "total += targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx,", "= 0 correct_1 = 0 correct_5 = 0 total =", "inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs)", "0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ])", "transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test)", "criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k", "as transforms import 
torchvision.datasets as datasets import os import argparse", "nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes", "input model') args = parser.parse_args() # Data print('==> Preparing data..')", "accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the specified values", "print('==> Preparing data..') # Data loading code valdir = os.path.join(args.datadir,", "import argparse from torch.autograd import Variable from extensions.utils import progress_bar", "net.eval() criterion.eval() test_loss = 0 correct_1 = 0 correct_5 =", "inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs,", "0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet", "import torchvision import torchvision.transforms as transforms import torchvision.datasets as datasets", "1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred))", "help='path to dataset') parser.add_argument('--inputdir', help='path to input model') args =", "from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss", "%.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test()", "= transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([", "torch.nn.functional as F import torch.backends.cudnn as cudnn import torchvision import", "num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s' % args.inputdir)", "= 0 total = 0 for batch_idx, (inputs, targets) in", "import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss", "path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net']", "+= loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total", "= argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path", "= transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset =", "True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res =", "criterion.eval() test_loss = 0 correct_1 = 0 correct_5 = 0", "torch import torch.nn as nn import torch.optim as optim import", "# Data print('==> Preparing data..') # Data loading code valdir", "targets) if isinstance(loss, tuple): loss_value, outputs = loss else: loss_value", "]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset,", "= os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224,", "nn import torch.optim as optim import torch.nn.functional as F import", "pred = output.topk(maxk, 1, True, True) pred = pred.t() correct", "res def test(): net.eval() criterion.eval() test_loss = 0 correct_1 =", "Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from", "as nn import torch.optim as optim import torch.nn.functional as F", 
"model') args = parser.parse_args() # Data print('==> Preparing data..') #", "net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else", "transforms import torchvision.datasets as datasets import os import argparse from", "ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device)", "= RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for", "pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = []", "torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets) if isinstance(loss,", "targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss:", "valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229,", "progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from", "import torchvision.transforms as transforms import torchvision.datasets as datasets import os", "correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%, top-5 accuracy:", "datasets import os import argparse from torch.autograd import Variable from", "= ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")", "= net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value,", "net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs", "import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to", "\"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\")", "net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target,", "os import argparse from torch.autograd import Variable from extensions.utils import", "from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir',", "loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs =", "argparse from torch.autograd import Variable from extensions.utils import progress_bar from", "ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference')", "Data loading code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485,", "torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net)", "torchvision.datasets as datasets import os import argparse from torch.autograd import", "to input model') args = parser.parse_args() # Data print('==> Preparing", "the precision@k for the specified values of k\"\"\" with torch.no_grad():", "torch.nn as nn import torch.optim as optim import torch.nn.functional as", "<filename>test.py import torch import torch.nn as nn import torch.optim as", "targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss", "print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net", "extensions.model_refinery_wrapper import 
ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import", "enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad():", "'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1,", "_, pred = output.topk(maxk, 1, True, True) pred = pred.t()", "0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224),", "1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion", "torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def", "to dataset') parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args()", "= pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for", "= torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() >", "max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True,", "os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])", "ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from", "argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to", "\"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output,", "correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%%", "= max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1,", "progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1),", "import torch import torch.nn as nn import torch.optim as optim", "shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s'", "loss_value = loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs,", "dataset') parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args() #", "checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\"", "%s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu')", "else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(),", "label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available()", "precision@k for the specified values of k\"\"\" with torch.no_grad(): maxk", "specified values of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size", "isinstance(loss, tuple): loss_value, outputs = loss else: loss_value = loss", "batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path:", "%.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total))", "for the specified values of k\"\"\" with torch.no_grad(): maxk =", "output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1,", "import torch.nn as nn import torch.optim as optim 
import torch.nn.functional", "as F import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms", "res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss = 0", "import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as transforms", "help='path to input model') args = parser.parse_args() # Data print('==>", "0 correct_1 = 0 correct_5 = 0 total = 0", "loss else: loss_value = loss test_loss += loss_value.item() prec1, prec5", "test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5))", "criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs = loss else:", "in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def", "import os import argparse from torch.autograd import Variable from extensions.utils", "use_cuda = torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint", "DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset')", "parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input model') args", "import RefineryLoss from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper", "data..') # Data loading code valdir = os.path.join(args.datadir, 'val') normalize", "= loss else: loss_value = loss test_loss += loss_value.item() prec1,", "for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return", "= correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval()", "(%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1", "= parser.parse_args() # Data print('==> Preparing data..') # Data loading", "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count()", "models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path", "from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models", "net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the", "prec5 = accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1", "extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from models import", "transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000", "RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the", "= target.size(0) _, pred = output.topk(maxk, 1, True, True) pred", "topk=(1, 5)) total += targets.size(0) correct_1 += prec1 correct_5 +=", "transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256),", "std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize,", "prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' %", 
"batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets =", "from torch.autograd import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper", "init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device", "torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery)", "normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test =", "as datasets import os import argparse from torch.autograd import Variable", "transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset", "# Data loading code valdir = os.path.join(args.datadir, 'val') normalize =", "0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(),", "maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk,", "if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs", "outputs = net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple):", "= criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs = loss", "correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval()", "as cudnn import torchvision import torchvision.transforms as transforms import torchvision.datasets", "return res def test(): net.eval() criterion.eval() test_loss = 0 correct_1", "= 0 for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda:", "targets) in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device)", "targets, topk=(1, 5)) total += targets.size(0) correct_1 += prec1 correct_5", "torchvision.transforms as transforms import torchvision.datasets as datasets import os import", "from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from models", "tuple): loss_value, outputs = loss else: loss_value = loss test_loss", "batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True)", "def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the specified", "(inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device),", "torch.optim as optim import torch.nn.functional as F import torch.backends.cudnn as", "extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import", "label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if", "+= targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader),", "= torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using", "target.size(0) _, pred = output.topk(maxk, 1, True, True) pred =", "use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss()", "'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test", "prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0)", "in 
enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with", "pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k", "loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1,", "optim import torch.nn.functional as F import torch.backends.cudnn as cudnn import", "| Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return", "for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets", "cudnn import torchvision import torchvision.transforms as transforms import torchvision.datasets as", "values of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size =", "-1).expand_as(pred)) res = [] for k in topk: correct_k =", "models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch", "= [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)", "total = 0 for batch_idx, (inputs, targets) in enumerate(testloader): if", "import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper", "topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test():", "if isinstance(loss, tuple): loss_value, outputs = loss else: loss_value =", "correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f", "torchvision import torchvision.transforms as transforms import torchvision.datasets as datasets import", "= inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss =", "RefineryLoss from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser", "args = parser.parse_args() # Data print('==> Preparing data..') # Data", "correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in", "transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda", "topk=(1,)): \"\"\"Computes the precision@k for the specified values of k\"\"\"", "args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net", "= pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk:", "target, topk=(1,)): \"\"\"Computes the precision@k for the specified values of", "\"\"\"Computes the precision@k for the specified values of k\"\"\" with", "transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader", "from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser =", "import torch.nn.functional as F import torch.backends.cudnn as cudnn import torchvision", "torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred =", "torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1:", "print('Using input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net", "0 for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs,", "keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss =", "torch.autograd import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import", "testloader = torch.utils.data.DataLoader(testset, 
batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available()", "test(): net.eval() criterion.eval() test_loss = 0 correct_1 = 0 correct_5", "import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet", "= checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device =", "Preparing data..') # Data loading code valdir = os.path.join(args.datadir, 'val')", "torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input", "torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir)", "= torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net,", "normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader =", "the specified values of k\"\"\" with torch.no_grad(): maxk = max(topk)", "parser.parse_args() # Data print('==> Preparing data..') # Data loading code", "if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's", "print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion =", "testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False,", "correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss", "Data print('==> Preparing data..') # Data loading code valdir =", "torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\",", "targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets)", "correct_5 = 0 total = 0 for batch_idx, (inputs, targets)", "def test(): net.eval() criterion.eval() test_loss = 0 correct_1 = 0", "with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred" ]
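
# Standalone sketch (separate from test.py, assuming only torch): the top-k
# counting logic behind `accuracy` above, exercised on a hand-built batch so
# the bookkeeping in test() is easy to verify. The helper is inlined here
# (as the hypothetical `topk_correct`) so the snippet runs without the
# repo's `extensions`/`models` packages.
import torch

def topk_correct(output, target, topk=(1,)):
    # For each k, count how many targets appear among the k highest logits.
    with torch.no_grad():
        maxk = max(topk)
        _, pred = output.topk(maxk, 1, True, True)   # (batch, maxk) indices
        pred = pred.t()                              # (maxk, batch)
        correct = pred.eq(target.view(1, -1).expand_as(pred))
        return [correct[:k].reshape(-1).float().sum().item() for k in topk]

# 3 samples, 4 classes: sample 0 is correct at top-1, sample 1 only within
# top-2, sample 2 wrong even at top-2.
logits = torch.tensor([[9., 1., 0., 0.],
                       [5., 6., 0., 0.],
                       [0., 0., 7., 8.]])
targets = torch.tensor([0, 0, 1])
print(topk_correct(logits, targets, topk=(1, 2)))  # [1.0, 2.0]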
<filename>paccmann_chemistry/utils/hyperparams.py
"""Model Parameters Module."""
import torch.optim as optim

from .search import SamplingSearch, GreedySearch, BeamSearch

SEARCH_FACTORY = {
    'sampling': SamplingSearch,
    'greedy': GreedySearch,
    'beam': BeamSearch,
}

OPTIMIZER_FACTORY = {
    'adadelta': optim.Adadelta,
    'adagrad': optim.Adagrad,
    'adam': optim.Adam,
    'adamax': optim.Adamax,
    'rmsprop': optim.RMSprop,
    'sgd': optim.SGD
}
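
# Hedged usage sketch: factories like the ones above map config strings to
# classes, and a typical consumer indexes them with a hyperparameter dict.
# `params` and `model` below are hypothetical stand-ins, not part of the
# shown module.
import torch.nn as nn
import torch.optim as optim

OPTIMIZER_FACTORY = {'adam': optim.Adam, 'sgd': optim.SGD}  # subset, for the sketch

params = {'optimizer': 'adam', 'lr': 1e-3}  # e.g. parsed from a JSON config
model = nn.Linear(8, 1)                     # placeholder model

optimizer = OPTIMIZER_FACTORY[params.get('optimizer', 'adam')](
    model.parameters(), lr=params.get('lr', 1e-4)
)
print(type(optimizer).__name__)  # Adam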
#
# Copyright (c) 2021 The GPflux Contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc

import numpy as np
import pytest
import tensorflow as tf
import tensorflow_probability as tfp
from gpflow.kullback_leiblers import gauss_kl

from gpflux.encoders import DirectlyParameterizedNormalDiag
from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer

tf.keras.backend.set_floatx("float64")

############
# Utilities
############


def _zero_one_normal_prior(w_dim):
    """ N(0, I) prior """
    return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim))


def get_distributions_with_w_dim():
    distributions = []
    for d in [1, 5]:
        mean = np.zeros(d)
        scale_tri_l = np.eye(d)
        mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l)

        std = np.ones(d)
        mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std)

        distributions.append((mvn, d))
        distributions.append((mvn_diag, d))
    return distributions


############
# Tests
############


@pytest.mark.parametrize("distribution, w_dim", get_distributions_with_w_dim())
def test_local_kls(distribution, w_dim):
    lv = LatentVariableLayer(encoder=None, prior=distribution)

    # test kl is 0 when posteriors == priors
    posterior = distribution
    assert lv._local_kls(posterior) == 0

    # test kl > 0 when posteriors != priors
    batch_size = 10
    params = distribution.parameters
    posterior_params = {
        k: [v + 0.5 for _ in range(batch_size)]
        for k, v in params.items()
        if isinstance(v, np.ndarray)
    }
    posterior = lv.distribution_class(**posterior_params)
    local_kls = lv._local_kls(posterior)
    assert np.all(local_kls > 0)
    assert local_kls.shape == (batch_size,)


@pytest.mark.parametrize("w_dim", [1, 5])
def test_local_kl_gpflow_consistency(w_dim):
    num_data = 400
    means = np.random.randn(num_data, w_dim)
    encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means)

    lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim))
    posteriors = lv._inference_posteriors(
        [np.random.randn(num_data, 3), np.random.randn(num_data, 2)]
    )

    q_mu = posteriors.parameters["loc"]
    q_sqrt = posteriors.parameters["scale_diag"]

    gpflow_local_kls = gauss_kl(q_mu, q_sqrt)
    tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors))

    np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10)


class ArrayMatcher:
    def __init__(self, expected):
        self.expected = expected

    def __eq__(self, actual):
        return np.allclose(actual, self.expected, equal_nan=True)


@pytest.mark.parametrize("w_dim", [1, 5])
def test_latent_variable_layer_losses(mocker, w_dim):
    num_data, x_dim, y_dim = 43, 3, 1

    prior_shape = (w_dim,)
    posteriors_shape = (num_data, w_dim)

    prior = tfp.distributions.MultivariateNormalDiag(
        loc=np.random.randn(*prior_shape),
        scale_diag=np.random.randn(*prior_shape) ** 2,
    )
    posteriors = tfp.distributions.MultivariateNormalDiag(
        loc=np.random.randn(*posteriors_shape),
        scale_diag=np.random.randn(*posteriors_shape) ** 2,
    )

    encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag))

    lv = LatentVariableLayer(encoder=encoder, prior=prior)

    inputs = np.full((num_data, x_dim), np.nan)
    targets = np.full((num_data, y_dim), np.nan)
    observations = [inputs, targets]
    encoder_inputs = np.concatenate(observations, axis=-1)

    _ = lv(inputs)
    encoder.assert_not_called()
    assert lv.losses == [0.0]

    _ = lv(inputs, observations=observations, training=True)

    # assert_called_once_with uses == for comparison which fails on arrays
    encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True)

    expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))]
    np.testing.assert_equal(lv.losses, expected_loss)  # also checks shapes match


@pytest.mark.parametrize("w_dim", [1, 5])
@pytest.mark.parametrize("seed2", [None, 42])
def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2):
    seed = 123

    inputs, targets = test_data
    num_data, x_dim = inputs.shape

    prior_shape = (w_dim,)
    posteriors_shape = (num_data, w_dim)

    prior = tfp.distributions.MultivariateNormalDiag(
        loc=np.random.randn(*prior_shape),
        scale_diag=np.random.randn(*prior_shape) ** 2,
    )
    posteriors = tfp.distributions.MultivariateNormalDiag(
        loc=np.random.randn(*posteriors_shape),
        scale_diag=np.random.randn(*posteriors_shape) ** 2,
    )

    encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag))

    lv = LatentVariableLayer(prior=prior, encoder=encoder)

    tf.random.set_seed(seed)
    sample_prior = lv(inputs, seed=seed2)
    tf.random.set_seed(seed)
    prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1)
    np.testing.assert_array_equal(sample_prior, prior_expected)

    tf.random.set_seed(seed)
    sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2)
    tf.random.set_seed(seed)
    posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1)
    np.testing.assert_array_equal(sample_posterior, posterior_expected)


def test_no_tensorflow_metaclass_overwritten():
    """
    LayerWithObservations is a subclass of tf.keras.layers.Layer (via
    TrackableLayer); this test ensures that TrackableLayer does not have a
    metaclass, and hence by adding the ABCMeta to LayerWithObservations we
    are not accidentally removing some required TensorFlow magic metaclass.
    """
    assert LayerWithObservations.__bases__ == (TrackableLayer,)
    assert type(TrackableLayer) is type
    assert type(LayerWithObservations) is abc.ABCMeta
# import abc import numpy as np import", "for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss =", "43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim)", "removing some required TensorFlow magic metaclass. \"\"\" assert LayerWithObservations.__bases__ ==", "= np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations", "10 params = distribution.parameters posterior_params = { k: [v +", "lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when", "x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations = [inputs,", "np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is", "under the License. # import abc import numpy as np", "magic metaclass. \"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is", "\"License\"); # you may not use this file except in", "= 43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data,", "import pytest import tensorflow as tf import tensorflow_probability as tfp", "loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim):", "actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker,", "tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import", "assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1,", "q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def", "[1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data,", "# distributed under the License is distributed on an \"AS", "w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors", "gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############", "!= priors batch_size = 10 params = distribution.parameters posterior_params =", "does not have a metaclass, and hence by adding the", "# Unless required by applicable law or agreed to in", "= gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class", "@pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution)", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, 
observations=[inputs,", "Copyright (c) 2021 The GPflux Contributors. # # Licensed under", "gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import", "seed2): seed = 123 inputs, targets = test_data num_data, x_dim", "You may obtain a copy of the License at #", "posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2)", "= np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std", "isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert", "** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder,", "[None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123", "import tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers", "[tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1,", "w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0", "I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions =", "the Apache License, Version 2.0 (the \"License\"); # you may", "batch_size = 10 params = distribution.parameters posterior_params = { k:", "prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim),", "distribution assert lv._local_kls(posterior) == 0 # test kl > 0", "assert lv._local_kls(posterior) == 0 # test kl > 0 when", "w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) #" ]
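The tests above use a `test_data` fixture that is not part of the recovered fragments; in a pytest project it would typically be provided by a conftest.py. A minimal sketch of such a fixture, assuming a small random regression-style (inputs, targets) pair; the fixture body and the shapes are assumptions for illustration, not part of the original module:

# Hypothetical conftest.py sketch (not recovered from the original file).
import numpy as np
import pytest


@pytest.fixture
def test_data():
    # Small random inputs/targets; any num_data and x_dim/y_dim work for the tests above.
    num_data, x_dim, y_dim = 7, 3, 1
    inputs = np.random.randn(num_data, x_dim)
    targets = np.random.randn(num_data, y_dim)
    return inputs, targets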
[ "PUT /trust/{relationship}}/{actorid} with a json body to change details on", "to create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id", "myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502, 'Not able to delete", "pair = { 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship':", "myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified':", "admins allowed) # POST /trust/{relationship} with json body to create", "# Access is the same as /trust if not check.checkAuthorisation(path='trust',", "'/trust/' + new_trust.relationship + '/' + new_trust.peerid)) pair = {", "self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id, relationship):", "if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships =", "check.trust and check.trust.peerid == peerid: isPeer = True else: #", "'No content') def delete(self, id, relationship): (Config, myself, check) =", "if not new_trust: self.response.set_status(408, 'Unable to create trust relationship') return", "= params['baseuri'] else: baseuri = '' if 'id' in params:", "trustHandler(webapp2.RequestHandler): def get(self, id, relationship, peerid): if self.request.get('_method') == 'PUT':", "self.response.set_status(400, 'Missing peer URL') return secret = Config.newToken() new_trust =", "if self.request.get('desc') and len(self.request.get('desc')) > 0: desc = self.request.get('desc') else:", "secret) # POST /trust/{relationship}}/{actorid} to send information to a peer", "id=id, path='trust') if not myself or check.response[\"code\"] != 200: return", "'/' + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id': myself.id,", "if 'desc' in params: desc = params['desc'] except ValueError: url", "if self.request.get('_method') == 'POST': self.post(id, relationship) return self.response.set_status(404, \"Not found\")", "to create new trust # relationship (see config.py for default", "self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships:", "len(peerid) == 0 or len(type) == 0: self.response.set_status(400, 'Missing mandatory", "'Accepted') def post(self, id, relationship, peerid): (Config, myself, check) =", "subpath=relationship, add_response=False) if not myself: return if relationship != 'trustee':", "\"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted') # Handling", "and params['approved'] == True: peer_approved = True except ValueError: self.response.set_status(400,", "admin, or peer secret) # DELETE /trust/{relationship}}/{actorid} to delete a", "\"Location\", str(Config.root + myself.id + '/trust/' + new_trust.relationship + \"/\"", "'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret,", "method='POST'): self.response.set_status(403) return secret = '' desc = '' relationship", "/trust/{relationship}}/{actorid} to send information to a peer about changes in", "add_response=False) if not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'):", "self.response.write(out) 
self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202,", "Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved = True else: approved", "return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not", "def get(self, id, relationship): if self.request.get('_method') == 'POST': self.post(id, relationship)", "+ myself.id + '/trust/' + new_trust.relationship + \"/\" + new_trust.peerid))", "path='trust', subpath=relationship, add_response=False) if not myself or (check.response[\"code\"] != 200", "'secret': rel.secret, }) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\"", "'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc,", "True except ValueError: self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship,", "from actingweb import actor from actingweb import config from actingweb", "type = '' try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url'", "Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def", "self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted')", "params = json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = None if 'approved' in", "headers: ' + str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "import actor from actingweb import config from actingweb import trust", "auth_obj=check) return # We allow non-approved peers to delete even", "Config.default_relationship type = '' try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if", "did a GET to verify if check.trust and check.trust.peerid ==", "# # GET /trust with query parameters (relationship, type, and", "the peer) (auth: creator, admin, or # peer secret) #", "0: desc = self.request.get('desc') else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship,", "= self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True Config", "'No json content') return if len(baseuri) == 0 or len(peerid)", "peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc) if not", "subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): self.response.set_status(403) return isPeer = False if", "'approved' in params: if params['approved'] and params['approved'] == True: peer_approved", "initiate a trust relationship between this # actor and another", "/trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params =", "= auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself or", "actingweb import trust from actingweb import auth import webapp2 import", "parameters (relationship, type, and peerid) to retrieve trust relationships (auth:", "params['desc'] else: desc = '' if 'verify' in params: verificationToken", "\"application/json\" self.response.set_status(201, 'Created') # Handling requests to /trust/*, e.g. 
/trust/friend", "def put(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "params: trustee_root = params['trustee_root'] else: trustee_root = '' if 'creator'", "= '' if 'type' in params: type = params['type'] else:", "import time # /trust handlers # # GET /trust with", "not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id", "ValueError: if not self.request.get('_method') or self.request.get('_method') != \"PUT\": self.response.set_status(400, 'No", "if not myself or check.response[\"code\"] != 200: return if not", "relationship=relationship, peerid=peerid, type=type) if not relationships: self.response.set_status(404, 'Not found') return", "== 0 or len(type) == 0: self.response.set_status(400, 'Missing mandatory attributes')", "to delete even if we haven't approved the relationship yet", "e.g. /trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self, id, relationship, peerid): if", "== True: peer_approved = True except ValueError: self.response.set_status(400, 'No json", "relationship (with # ?peer=true if the delete is from the", "Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship, type=type) if", "= '' if 'desc' in params: desc = params['desc'] else:", "out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved: self.response.set_status(200,", "found\") return # Access is the same as /trust if", "check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself", "or self.request.get('_method') != \"PUT\": self.response.set_status(400, 'No json content') return if", "params['desc'] except ValueError: url = self.request.get('url') relationship = self.request.get('relationship') type", "if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return try: params =", "= '' if 'relationship' in params: relationship = params['relationship'] if", "peer about changes in the relationship # PUT /trust/{relationship}}/{actorid} with", "myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not", "= True else: approved = None except ValueError: if not", "params['verify'] else: verificationToken = None except ValueError: self.response.set_status(400, 'No json", "True: peer_approved = True except ValueError: self.response.set_status(400, 'No json content')", "secret = params['secret'] else: secret = '' if 'desc' in", "return secret = Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc,", "auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself: return if", "self.response.set_status(400, 'No json content') return if len(baseuri) == 0 or", "relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False)", "len(type) == 0: self.response.set_status(400, 'Missing mandatory attributes') return if Config.auto_accept_default_relationship", "'id': myself.id, 'peerid': new_trust.peerid, 'relationship': new_trust.relationship, 'approved': new_trust.approved, 'peer_approved': new_trust.peer_approved,", "json body to change details on a relationship (baseuri, secret,", "found') return pairs 
= [] for rel in relationships: pairs.append({", "= params['baseuri'] else: baseuri = '' if 'desc' in params:", "+ str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship)", "# DELETE /trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true", "json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root' in params: trustee_root = params['trustee_root'] else:", "return if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = ''", "else: self.response.set_status(405, 'Not modified') def put(self, id, relationship, peerid): (Config,", "actingweb import actor from actingweb import config from actingweb import", "'Not found') return my_trust = relationships[0] if isPeer: deleted =", "'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship, peerid): (Config,", "if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid,", "id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not", "params: baseuri = params['baseuri'] else: baseuri = '' if 'id'", "GET to verify if check.trust and check.trust.peerid == peerid and", "in params: if params['approved'] and params['approved'] == True: peer_approved =", "import logging import datetime import time # /trust handlers #", "DELETE /trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true if", "if self.request.get('_method') == 'PUT': self.put(id, relationship, peerid) return if self.request.get('_method')", "params['id'] else: peerid = '' if 'type' in params: type", "200: return if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship =", "else: type = '' if 'secret' in params: secret =", "json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url' in params: url = params['url'] else:", "and len(self.request.get('approved')) > 0: if self.request.get('approved').lower() == \"true\": approved =", "= relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted", "relationship with peer.') return self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>',", "not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def", "def get(self, id): if self.request.get('_method') == 'POST': self.post(id) return (Config,", "myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc) if", "'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid': new_trust.peerid, 'relationship': new_trust.relationship, 'approved': new_trust.approved,", "= '' relationship = Config.default_relationship type = '' try: params", "if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else:", "new_trust.relationship, 'approved': new_trust.approved, 'peer_approved': new_trust.peer_approved, 'verified': new_trust.verified, 'type': new_trust.type, 'desc':", "request for a relationship, assume that peer has approved new_trust", "'' if 'creator' in params: creator = params['creator'] else: creator", "for 
testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\":", "+ new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid':", "relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type) if not relationships: self.response.set_status(404,", "trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') == 'POST':", "'ignore')) peer_approved = None if 'approved' in params: if params['approved']", "if not myself: return if relationship != 'trustee': self.response.set_status(404, \"Not", "as /trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204,", "the relationship # PUT /trust/{relationship}}/{actorid} with a json body to", "(auth: creator, # admin, or peer secret) # DELETE /trust/{relationship}}/{actorid}", "= json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok') def post(self,", "'desc' in params: desc = params['desc'] except ValueError: url =", "my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out)", "my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship,", "modified') def delete(self, id, relationship, peerid): (Config, myself, check) =", "self.request.get('url') relationship = self.request.get('relationship') type = self.request.get('type') if len(url) ==", "add_response=False) if not myself or (check.response[\"code\"] != 200 and check.response[\"code\"]", "return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id, relationship): (Config,", "baseuri = params['baseuri'] else: baseuri = '' if 'desc' in", "or len(peerid) == 0 or len(type) == 0: self.response.set_status(400, 'Missing", "not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] #", "len(baseuri) == 0 or len(peerid) == 0 or len(type) ==", "self.request.get('relationship') type = self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships(", "= params['relationship'] if 'type' in params: type = params['type'] if", "else: secret = '' if 'desc' in params: desc =", "actor and another (reciprocal relationship) (auth: only creator and admins", "myself or (check.response[\"code\"] != 200 and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self,", "desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable to create", "== 'DELETE': self.delete(id, relationship, peerid) return logging.debug('GET trust headers: '", "check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore'))", "'desc': new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"]", "method='GET'): self.response.set_status(403) return relationship = '' type = '' peerid", "= { 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship,", "'Created') # Handling requests to /trust/*, e.g. 
/trust/friend class relationshipHandler(webapp2.RequestHandler):", "'Not modified') def put(self, id, relationship, peerid): (Config, myself, check)", "= params['verify'] else: verificationToken = None except ValueError: self.response.set_status(400, 'No", "# peer secret) # Handling requests to trust/ class rootHandler(webapp2.RequestHandler):", "deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502,", "even when requestor is not a peer (primarily for testing", "method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root'", "is from the peer) (auth: creator, admin, or # peer", "!= 200: return if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret", "relationship != 'trustee': self.response.set_status(404, \"Not found\") return # Access is", "None if 'approved' in params: if params['approved'] and params['approved'] ==", "params['approved'] == True: peer_approved = True except ValueError: self.response.set_status(400, 'No", "len(self.request.get('desc')) > 0: desc = self.request.get('desc') else: desc = ''", "url = self.request.get('url') relationship = self.request.get('relationship') type = self.request.get('type') if", "subpath='<type>', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if", "= '' if 'verify' in params: verificationToken = params['verify'] else:", "subpath=relationship) if not myself or check.response[\"code\"] != 200: return if", "\"true\": approved = True else: approved = None if self.request.get('baseuri')", "'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out = json.dumps(pairs)", "not relationships: self.response.set_status(404, 'Not found') return pairs = [] for", "the peer did a GET to verify if check.trust and", "len(self.request.get('approved')) > 0: if self.request.get('approved').lower() == \"true\": approved = True", "to a peer about changes in the relationship # PUT", "self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler,", "from actingweb import trust from actingweb import auth import webapp2", "id=id, path='trust', subpath=relationship, add_response=False) if not myself: return if not", "'' if 'type' in params: type = params['type'] else: type", "or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST',", "json import logging import datetime import time # /trust handlers", "creator = params['creator'] else: creator = None except ValueError: self.response.set_status(400,", "= auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself: return", "id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"] != 200:", "== 'POST': self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "!= 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved peers", "my_trust = relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else:", "if 'relationship' in params: relationship = params['relationship'] if 'type' in", "params: url = params['url'] else: url = '' if 
'relationship'", "if not myself or (check.response[\"code\"] != 200 and check.response[\"code\"] !=", "\"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self,", "= params['type'] if 'desc' in params: desc = params['desc'] except", "'secret' in params: secret = params['secret'] else: secret = ''", "= json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created')", "str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if", "logging import datetime import time # /trust handlers # #", "= json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root' in params: trustee_root = params['trustee_root']", "if 'desc' in params: desc = params['desc'] else: desc =", "= None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri =", "'/trust/' + new_trust.relationship + \"/\" + new_trust.peerid)) pair = {", "requests to trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method')", "not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return try: params =", "'' peerid = '' relationship = self.request.get('relationship') type = self.request.get('type')", "= params['id'] else: peerid = '' if 'type' in params:", "relationships: self.response.set_status(404, 'Not found') return pairs = [] for rel", "secret) # Handling requests to trust/ class rootHandler(webapp2.RequestHandler): def get(self,", "pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved':", "create new trust # relationship (see config.py for default relationship", "time # /trust handlers # # GET /trust with query", "'' try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url' in params:", "{ 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid': new_trust.peerid, 'relationship': new_trust.relationship, 'approved':", "'Missing mandatory attributes') return if Config.auto_accept_default_relationship and Config.default_relationship == relationship:", "changes in the relationship # PUT /trust/{relationship}}/{actorid} with a json", "import os from google.appengine.ext.webapp import template import json import logging", "params: relationship = params['relationship'] if 'type' in params: type =", "= myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found')", "= True else: approved = None if self.request.get('baseuri') and len(self.request.get('baseuri'))", "peerid=peerid, approved=False): self.response.set_status(403) return isPeer = False if check.trust and", "relationship = params['relationship'] if 'type' in params: type = params['type']", "+ myself.id + '/trust/' + new_trust.relationship + '/' + new_trust.peerid))", "def post(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "params: desc = params['desc'] except ValueError: url = self.request.get('url') relationship", "relationship) return self.response.set_status(404, \"Not found\") def put(self, id, relationship): (Config,", "approved = False # Since we received a request for", "path='trust', subpath=relationship) if not myself or check.response[\"code\"] != 200: return", "peerid=peerid, 
baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified')", "json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params: baseuri = params['baseuri'] else:", "(auth: creator, admin, or peer secret) # POST /trust/{relationship}}/{actorid} to", "return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root' in params:", "json content') return if self.request.get('approved') and len(self.request.get('approved')) > 0: if", "to send information to a peer about changes in the", "in params: baseuri = params['baseuri'] else: baseuri = '' if", "len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else: baseuri = ''", "relationship = '' type = '' peerid = '' relationship", "relationship') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' +", "'url' in params: url = params['url'] else: url = ''", "os from google.appengine.ext.webapp import template import json import logging import", "approved = True else: approved = None if self.request.get('baseuri') and", "/trust with query parameters (relationship, type, and peerid) to retrieve", "/trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self, id, relationship, peerid): if self.request.get('_method')", "return if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret = ''", "self.request.get('baseuri') else: baseuri = '' if self.request.get('desc') and len(self.request.get('desc')) >", "params: secret = params['secret'] else: secret = '' if 'desc'", "= False if check.trust and check.trust.peerid == peerid: isPeer =", "self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid) return logging.debug('GET trust headers:", "self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204,", "'No json content') return if len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root)", "if relationship != 'trustee': self.response.set_status(404, \"Not found\") return # Access", "params['baseuri'] else: baseuri = '' if 'desc' in params: desc", "specific relationships, e.g. 
/trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self, id, relationship,", "If the peer did a GET to verify if check.trust", "rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved':", "'Not able to delete relationship with peer.') return self.response.set_status(204, 'Ok')", "return if len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator)", "params['secret'] else: secret = '' if 'desc' in params: desc", "if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content')", "'Not found') return pairs = [] for rel in relationships:", "subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore'))", "return logging.debug('GET trust headers: ' + str(self.request.headers)) (Config, myself, check)", "type = self.request.get('type') if len(url) == 0: self.response.set_status(400, 'Missing peer", "self.request.get('type') if len(url) == 0: self.response.set_status(400, 'Missing peer URL') return", "delete(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "!= 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403)", "self.request.get('_method') or self.request.get('_method') != \"PUT\": self.response.set_status(400, 'No json content') return", "if 'baseuri' in params: baseuri = params['baseuri'] else: baseuri =", "False if check.trust and check.trust.peerid == peerid: isPeer = True", "self.request.get('relationship') type = self.request.get('type') if len(url) == 0: self.response.set_status(400, 'Missing", "self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself,", "= '' desc = '' relationship = Config.default_relationship type =", "has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type,", "self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted')", "self.response.set_status(404, 'Not found') return my_trust = relationships[0] # If the", "check.trust.peerid == peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken", "else: baseuri = '' if 'desc' in params: desc =", "# POST /trust/{relationship}}/{actorid} to send information to a peer about", "my_trust = relationships[0] # If the peer did a GET", "in params: type = params['type'] if 'desc' in params: desc", "in params: desc = params['desc'] else: desc = '' if", "'peer_approved': new_trust.peer_approved, 'verified': new_trust.verified, 'type': new_trust.type, 'desc': new_trust.desc, 'secret': new_trust.secret,", "get(self, id, relationship): if self.request.get('_method') == 'POST': self.post(id, relationship) return", "'desc': my_trust.desc, 'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"]", "self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in", "# Use of GET param 
peer=true is a way of", "a peer # relationship even when requestor is not a", "if len(url) == 0: self.response.set_status(400, 'Missing peer URL') return secret", "= '' if 'secret' in params: secret = params['secret'] else:", "else: approved = None except ValueError: if not self.request.get('_method') or", "check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid):", "return self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>',", "= json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url' in params: url = params['url']", "'approved': new_trust.approved, 'peer_approved': new_trust.peer_approved, 'verified': new_trust.verified, 'type': new_trust.type, 'desc': new_trust.desc,", "= myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc)", "to delete relationship with peer.') return self.response.set_status(204, 'Ok') application =", "approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted') # Handling requests to", "creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship): (Config,", "and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow", "== peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else:", "Config = config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not", "creator, admin, or peer secret) # POST /trust/{relationship}}/{actorid} to send", "'' relationship = self.request.get('relationship') type = self.request.get('type') peerid = self.request.get('peerid')", "myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not", "new_trust.type, 'desc': new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out)", "'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type,", "peer_approved = True except ValueError: self.response.set_status(400, 'No json content') return", "desc = params['desc'] else: desc = '' if 'verify' in", "check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved", "check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret = '' desc = ''", "approved = True else: approved = None except ValueError: if", "if 'approved' in params: if params['approved'] and params['approved'] == True:", "from the peer) (auth: creator, admin, or # peer secret)", "get(self, id, relationship, peerid): if self.request.get('_method') == 'PUT': self.put(id, relationship,", "if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return try: params", "only creator and admins allowed) # POST /trust with json", "self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type) if not 
relationships:", "GET /trust with query parameters (relationship, type, and peerid) to", "verificationToken = params['verify'] else: verificationToken = None except ValueError: self.response.set_status(400,", "and peerid) to retrieve trust relationships (auth: only creator and", "200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return", "default relationship and auto-accept, no # auth required) # GET", "+ new_trust.relationship + \"/\" + new_trust.peerid)) pair = { 'baseuri':", "myself: return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return try:", "'Missing peer URL') return secret = Config.newToken() new_trust = myself.createReciprocalTrust(", "params: desc = params['desc'] else: desc = '' if 'verify'", "self.post(id, relationship) return self.response.set_status(404, \"Not found\") def put(self, id, relationship):", "template import json import logging import datetime import time #", "found') return my_trust = relationships[0] # If the peer did", "params: if params['approved'] and params['approved'] == True: peer_approved = True", "myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return", "'relationship' in params: relationship = params['relationship'] if 'type' in params:", "params['approved'] == True or params['approved'].lower() == \"true\": approved = True", "desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204,", "desc = '' if 'verify' in params: verificationToken = params['verify']", "a json body to change details on a relationship (baseuri,", "peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if", "to retrieve trust relationships (auth: only creator and admins allowed)", "else: creator = None except ValueError: self.response.set_status(400, 'No json content')", "myself: return if relationship != 'trustee': self.response.set_status(404, \"Not found\") return", "params['baseuri'] else: baseuri = '' if 'id' in params: peerid", "check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid):", "self.request.get('approved').lower() == \"true\": approved = True else: approved = None", "(Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not", "param peer=true is a way of forcing no deletion of", "(auth: only creator and admins allowed) # POST /trust with", "= relationships[0] # If the peer did a GET to", "or params['approved'].lower() == \"true\": approved = True else: approved =", "(Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself", "add_response=False) if not myself: return if relationship != 'trustee': self.response.set_status(404,", "my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc':", "if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted') # Handling requests", "trust relationships (auth: only creator and admins allowed) # POST", "id, relationship): if self.request.get('_method') == 'POST': self.post(id, relationship) return 
self.response.set_status(404,", "= '' if 'id' in params: peerid = params['id'] else:", "relationship yet if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): self.response.set_status(403)", "a relationship, assume that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri,", "check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self,", "peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def put(self, id,", "relationship): if self.request.get('_method') == 'POST': self.post(id, relationship) return self.response.set_status(404, \"Not", "'trustee': self.response.set_status(404, \"Not found\") return # Access is the same", "auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"] !=", "= Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship, type=type)", "'Ok') else: self.response.set_status(405, 'Not modified') def delete(self, id, relationship, peerid):", "'PUT': self.put(id, relationship, peerid) return if self.request.get('_method') == 'DELETE': self.delete(id,", "isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True)", "my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken':", "auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself or (check.response[\"code\"]", "== \"true\": isPeer = True Config = config.config() relationships =", "params['type'] else: type = '' if 'secret' in params: secret", "/trust handlers # # GET /trust with query parameters (relationship,", "We allow non-approved peers to delete even if we haven't", "/trust/{relationship}}/{actorid} with a json body to change details on a", "another (reciprocal relationship) (auth: only creator and admins allowed) #", "!= 'trustee': self.response.set_status(404, \"Not found\") return # Access is the", "len(url) == 0: self.response.set_status(400, 'Missing peer URL') return secret =", "self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root", "same as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try:", "def put(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self,", "self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type)", "information to a peer about changes in the relationship #", "url=url, secret=secret, desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable", "received a request for a relationship, assume that peer has", "rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out =", "self.response.set_status(400, 'No json content') return if self.request.get('approved') and len(self.request.get('approved')) >", "approved=approved, secret=secret, verificationToken=verificationToken, type=type, 
peer_approved=True, relationship=relationship, desc=desc) if not new_trust:", "relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted =", "# GET /trust with query parameters (relationship, type, and peerid)", "self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/'", "peerGet.lower() == \"true\": isPeer = True Config = config.config() relationships", "self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' + new_trust.relationship +", "if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified')", "new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if", "0: self.response.set_status(400, 'Missing mandatory attributes') return if Config.auto_accept_default_relationship and Config.default_relationship", "not myself: return if relationship != 'trustee': self.response.set_status(404, \"Not found\")", "= params['trustee_root'] else: trustee_root = '' if 'creator' in params:", "baseuri = self.request.get('baseuri') else: baseuri = '' if self.request.get('desc') and", "method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if", "in params: desc = params['desc'] except ValueError: url = self.request.get('url')", "if we haven't approved the relationship yet if not check.checkAuthorisation(path='trust',", "'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc,", "baseuri = params['baseuri'] else: baseuri = '' if 'id' in", "<reponame>actingweb/box-actingweb<filename>aw-actor-trust.py #!/usr/bin/env python # from actingweb import actor from actingweb", "rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, })", "create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id +", "# Since we received a request for a relationship, assume", "return secret = '' desc = '' relationship = Config.default_relationship", "my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret':", "= params['desc'] else: desc = '' if 'verify' in params:", "= myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not", "'desc': rel.desc, 'secret': rel.secret, }) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"]", "for default relationship and auto-accept, no # auth required) #", "params['desc'] else: desc = '' if 'approved' in params: if", "in the relationship # PUT /trust/{relationship}}/{actorid} with a json body", "True else: approved = None if self.request.get('baseuri') and len(self.request.get('baseuri')) >", "else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc):", "True Config = config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if", "'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 
'relationship': my_trust.relationship, 'approved': my_trust.approved,", "/trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if self.request.get('_method') ==", "post(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "== True or params['approved'].lower() == \"true\": approved = True else:", "on a specific relationship (auth: creator, admin, or peer secret)", "ValueError: self.response.set_status(400, 'No json content') return if len(trustee_root) > 0:", "self.request.get('_method') == 'POST': self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self,", "peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False)", "> 0: desc = self.request.get('desc') else: desc = '' if", "'verified': new_trust.verified, 'type': new_trust.type, 'desc': new_trust.desc, 'secret': new_trust.secret, } out", "0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def", "new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201,", "return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not", "or peer secret) # DELETE /trust/{relationship}}/{actorid} to delete a relationship", "if check.trust and check.trust.peerid == peerid: isPeer = True else:", "with json body to create new trust # relationship (see", "else: self.response.set_status(405, 'Not modified') def delete(self, id, relationship, peerid): (Config,", "purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer =", "not a peer (primarily for testing purposes) peerGet = self.request.get('peer').lower()", "relationships: pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship,", "self.response.set_status(204, 'No content') def post(self, id, relationship): (Config, myself, check)", "self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True Config =", "json content') return if len(baseuri) == 0 or len(peerid) ==", "from google.appengine.ext.webapp import template import json import logging import datetime", "check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return", "peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not", "not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = '' type =", "new_trust: self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header( \"Location\",", "'' if 'desc' in params: desc = params['desc'] else: desc", "creator, admin, or # peer secret) # Handling requests to", "is the same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403)", "return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return try: params", "not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken = ''", "a 
request for a relationship, assume that peer has approved", "my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken = '' pair =", "peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved =", "not new_trust: self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header(", "details on a relationship (baseuri, secret, desc) (auth: creator, #", "get(self, id): if self.request.get('_method') == 'POST': self.post(id) return (Config, myself,", "0: baseuri = self.request.get('baseuri') else: baseuri = '' if self.request.get('desc')", "forcing no deletion of a peer # relationship even when", "desc = params['desc'] else: desc = '' if 'approved' in", "peerid) return logging.debug('GET trust headers: ' + str(self.request.headers)) (Config, myself,", "GET /trust/{relationship}}/{actorid} to get details on a specific relationship (auth:", "import config from actingweb import trust from actingweb import auth", "pair = { 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid': new_trust.peerid, 'relationship':", "deletePeer=True) if not deleted: self.response.set_status(502, 'Not able to delete relationship", "= None except ValueError: self.response.set_status(400, 'No json content') return if", "we received a request for a relationship, assume that peer", "if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root +", "details on a specific relationship (auth: creator, admin, or peer", "id=id, path='trust', subpath=relationship, add_response=False) if not myself: return if relationship", "relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return my_trust", "if 'approved' in params: if params['approved'] == True or params['approved'].lower()", "not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return try: params =", "a relationship (baseuri, secret, desc) (auth: creator, # admin, or", "else: desc = '' if 'approved' in params: if params['approved']", "'approved' in params: if params['approved'] == True or params['approved'].lower() ==", "my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type':", "!= \"PUT\": self.response.set_status(400, 'No json content') return if self.request.get('approved') and", "desc = '' relationship = Config.default_relationship type = '' try:", "else: verificationToken = None except ValueError: self.response.set_status(400, 'No json content')", "if not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0]", "to change details on a relationship (baseuri, secret, desc) (auth:", "'' desc = '' relationship = Config.default_relationship type = ''", "content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405,", "delete even if we haven't approved the relationship yet if", "0 or len(peerid) == 0 or len(type) == 0: self.response.set_status(400,", "= self.request.get('type') if len(url) == 0: self.response.set_status(400, 'Missing peer URL')", "put(self, id, relationship, peerid): (Config, myself, check) = 
auth.init_actingweb(appreq=self, id=id,", "return # Access is the same as /trust if not", "peerid): if self.request.get('_method') == 'PUT': self.put(id, relationship, peerid) return if", "else: trustee_root = '' if 'creator' in params: creator =", "type=type, peer_approved=True, relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return", "'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\"", "# Handling requests to trust/ class rootHandler(webapp2.RequestHandler): def get(self, id):", "rel.secret, }) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200,", "# actor and another (reciprocal relationship) (auth: only creator and", "attributes') return if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved =", "= '' peerid = '' relationship = self.request.get('relationship') type =", "to verify if check.trust and check.trust.peerid == peerid and not", "(auth: creator, admin, or # peer secret) # Handling requests", "'Ok') def post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "relationship, peerid) return if self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid)", "len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No", "delete relationship with peer.') return self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([", "= self.request.get('relationship') type = self.request.get('type') if len(url) == 0: self.response.set_status(400,", "rel.desc, 'secret': rel.secret, }) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] =", "\"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check) =", "secret = Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship,", "except ValueError: self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid,", "deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if", "path='trust') if not myself or check.response[\"code\"] != 200: return if", "check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = '' type = ''", "= self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type) if not", "new_trust.relationship + \"/\" + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri,", "def get(self, id, relationship, peerid): if self.request.get('_method') == 'PUT': self.put(id,", "import template import json import logging import datetime import time", "except ValueError: url = self.request.get('url') relationship = self.request.get('relationship') type =", "check.trust and check.trust.peerid == peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken", "= self.request.get('baseuri') else: baseuri = '' if self.request.get('desc') and len(self.request.get('desc'))", "peer secret) # POST /trust/{relationship}}/{actorid} to send information to a", "secret, desc) (auth: creator, # admin, or peer secret) 
#", "= json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params: baseuri = params['baseuri']", "else: approved = None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0:", "webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'), webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler, name='trustHandler'), ],", "'' relationship = Config.default_relationship type = '' try: params =", "= json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = None if 'approved' in params:", "auth required) # GET /trust/{relationship}}/{actorid} to get details on a", "rel.peerid, 'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type':", "relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable to create trust", "200 and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We", "self.response.set_status(404, \"Not found\") def put(self, id, relationship): (Config, myself, check)", "= '' pair = { 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid':", "found\") def put(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self,", "if 'url' in params: url = params['url'] else: url =", "if 'id' in params: peerid = params['id'] else: peerid =", "= \"application/json\" self.response.set_status(201, 'Created') # Handling requests to /trust/*, e.g.", "'' if 'id' in params: peerid = params['id'] else: peerid", "creator, # admin, or peer secret) # DELETE /trust/{relationship}}/{actorid} to", "json body to initiate a trust relationship between this #", "relationship) (auth: only creator and admins allowed) # POST /trust/{relationship}", "id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "baseuri = '' if 'desc' in params: desc = params['desc']", "and another (reciprocal relationship) (auth: only creator and admins allowed)", "self.request.get('desc') else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved,", "approved=approved, desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def delete(self,", "self.response.set_status(403) return relationship = '' type = '' peerid =", "self.response.set_status(405, 'Not modified') def delete(self, id, relationship, peerid): (Config, myself,", "content') return if len(baseuri) == 0 or len(peerid) == 0", "not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8',", "a way of forcing no deletion of a peer #", "config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404,", "put(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "from actingweb import config from actingweb import trust from actingweb", "params: verificationToken = params['verify'] else: verificationToken = None except ValueError:", "trust headers: ' + str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self,", "new_trust.peer_approved, 'verified': new_trust.verified, 'type': new_trust.type, 'desc': 
new_trust.desc, 'secret': new_trust.secret, }", "'ignore')) if 'trustee_root' in params: trustee_root = params['trustee_root'] else: trustee_root", "my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved':", "/trust/*, e.g. /trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if", "myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def", "change details on a relationship (baseuri, secret, desc) (auth: creator,", "self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root' in", "trustee_root = params['trustee_root'] else: trustee_root = '' if 'creator' in", "\"Not found\") return # Access is the same as /trust", "else: baseuri = '' if 'id' in params: peerid =", "'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' +", "200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return", "if self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid) return logging.debug('GET trust", "'DELETE': self.delete(id, relationship, peerid) return logging.debug('GET trust headers: ' +", "json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok') else:", "= self.request.get('desc') else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri,", "in relationships: pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship':", "or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403)", "= '' if self.request.get('desc') and len(self.request.get('desc')) > 0: desc =", "auth import webapp2 import os from google.appengine.ext.webapp import template import", "self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests to", "else: approved = False # Since we received a request", "config from actingweb import trust from actingweb import auth import", "not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): self.response.set_status(403) return isPeer =", "and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else: baseuri =", "found') return my_trust = relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid,", "return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' + new_trust.relationship", "= \"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted') #", "config.py for default relationship and auto-accept, no # auth required)", "if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret = '' desc", "query parameters (relationship, type, and peerid) to retrieve trust relationships", "not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params = 
json.loads(self.request.body.decode('utf-8', 'ignore'))", "if len(baseuri) == 0 or len(peerid) == 0 or len(type)", "if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return try: params", "or len(type) == 0: self.response.set_status(400, 'Missing mandatory attributes') return if", "}) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok')", "peer_approved=True, relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header(", "Handling requests to trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if", "get details on a specific relationship (auth: creator, admin, or", "'Ok') else: self.response.set_status(405, 'Not modified') def put(self, id, relationship, peerid):", "peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri'", "ValueError: url = self.request.get('url') relationship = self.request.get('relationship') type = self.request.get('type')", "json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created') else:", "POST /trust with json body to initiate a trust relationship", "not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return", "'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved,", "self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def delete(self, id, relationship,", "myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id, relationship): (Config, myself,", "isPeer = True else: # Use of GET param peer=true", "baseuri = '' if 'id' in params: peerid = params['id']", "else: peerid = '' if 'type' in params: type =", "my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, }", "self.response.set_status(201, 'Created') # Handling requests to /trust/*, e.g. 
/trust/friend class", "delete(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "(with # ?peer=true if the delete is from the peer)", "params: type = params['type'] else: type = '' if 'secret'", "specific relationship (auth: creator, admin, or peer secret) # POST", "post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if", "approved = True else: approved = False # Since we", "False # Since we received a request for a relationship,", "out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201,", "subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid)", "about changes in the relationship # PUT /trust/{relationship}}/{actorid} with a", "'Not found') return my_trust = relationships[0] # If the peer", "if self.request.get('approved') and len(self.request.get('approved')) > 0: if self.request.get('approved').lower() == \"true\":", "subpath=relationship, add_response=False) if not myself or (check.response[\"code\"] != 200 and", "if self.request.get('_method') == 'POST': self.post(id) return (Config, myself, check) =", "myself.id + '/trust/' + new_trust.relationship + \"/\" + new_trust.peerid)) pair", "logging.debug('GET trust headers: ' + str(self.request.headers)) (Config, myself, check) =", "if 'creator' in params: creator = params['creator'] else: creator =", "'relationship': new_trust.relationship, 'approved': new_trust.approved, 'peer_approved': new_trust.peer_approved, 'verified': new_trust.verified, 'type': new_trust.type,", "relationship, assume that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid,", "new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id +", "self.request.get('_method') != \"PUT\": self.response.set_status(400, 'No json content') return if self.request.get('approved')", "peer # relationship even when requestor is not a peer", "check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if", "Access is the same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'):", "from actingweb import auth import webapp2 import os from google.appengine.ext.webapp", "and not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken =", "True or params['approved'].lower() == \"true\": approved = True else: approved", "not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust',", "allowed) # POST /trust with json body to initiate a", "params['relationship'] if 'type' in params: type = params['type'] if 'desc'", "# If the peer did a GET to verify if", "method='PUT', peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if", "secret = '' desc = '' relationship = Config.default_relationship type", "admin, or peer secret) # POST /trust/{relationship}}/{actorid} to send information", "peer secret) # Handling requests to trust/ class rootHandler(webapp2.RequestHandler): def", "for rel in relationships: pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid':", 
"myself.id, 'peerid': new_trust.peerid, 'relationship': new_trust.relationship, 'approved': new_trust.approved, 'peer_approved': new_trust.peer_approved, 'verified':", "self.response.set_status(202, 'Accepted') # Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd", "= \"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check)", "relationship = self.request.get('relationship') type = self.request.get('type') if len(url) == 0:", "relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship,", "content') return if self.request.get('approved') and len(self.request.get('approved')) > 0: if self.request.get('approved').lower()", "myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='POST'):", "peer (primarily for testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower()", "except ValueError: self.response.set_status(400, 'No json content') return if len(trustee_root) >", "id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship,", "haven't approved the relationship yet if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE',", "peerid = '' relationship = self.request.get('relationship') type = self.request.get('type') peerid", "testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer", "webapp2 import os from google.appengine.ext.webapp import template import json import", "rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') == 'POST': self.post(id) return", "is a way of forcing no deletion of a peer", "verify if check.trust and check.trust.peerid == peerid and not my_trust.verified:", "type=type) if not new_trust: self.response.set_status(408, 'Unable to create trust relationship')", "= True Config = config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid)", "rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out = json.dumps(pairs) self.response.write(out)", "'ignore')) if 'url' in params: url = params['url'] else: url", "(Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if", "peer did a GET to verify if check.trust and check.trust.peerid", "even if we haven't approved the relationship yet if not", "= False # Since we received a request for a", "True else: approved = False # Since we received a", "to delete a relationship (with # ?peer=true if the delete", "'POST': self.post(id, relationship) return self.response.set_status(404, \"Not found\") def put(self, id,", "my_trust.desc, 'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] =", "params['url'] else: url = '' if 'relationship' in params: relationship", "self.response.set_status(403) return isPeer = False if check.trust and check.trust.peerid ==", "return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params:", "return self.response.set_status(404, \"Not found\") def put(self, id, relationship): (Config, myself,", "not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships(", "check.checkAuthorisation(path='trust', subpath='<type>/<id>', 
method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship,", "# GET /trust/{relationship}}/{actorid} to get details on a specific relationship", "= my_trust.verificationToken else: verificationToken = '' pair = { 'baseuri':", "of forcing no deletion of a peer # relationship even", "type, and peerid) to retrieve trust relationships (auth: only creator", "deletion of a peer # relationship even when requestor is", "json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests", "= auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"]", "params: baseuri = params['baseuri'] else: baseuri = '' if 'desc'", "class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if self.request.get('_method') == 'POST':", "not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] if", "in params: if params['approved'] == True or params['approved'].lower() == \"true\":", "relationship = self.request.get('relationship') type = self.request.get('type') peerid = self.request.get('peerid') relationships", "if 'trustee_root' in params: trustee_root = params['trustee_root'] else: trustee_root =", "to initiate a trust relationship between this # actor and", "return my_trust = relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False)", "/trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No", "or (check.response[\"code\"] != 200 and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check)", "ValueError: self.response.set_status(400, 'No json content') return if len(baseuri) == 0", "a specific relationship (auth: creator, admin, or peer secret) #", "self.response.set_status(403) return secret = '' desc = '' relationship =", "to /trust/*, e.g. 
/trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship):", "'baseuri' in params: baseuri = params['baseuri'] else: baseuri = ''", "type = '' peerid = '' relationship = self.request.get('relationship') type", "!= 200 and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return #", "of GET param peer=true is a way of forcing no", "desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def delete(self, id,", "not myself or (check.response[\"code\"] != 200 and check.response[\"code\"] != 401):", "== 0: self.response.set_status(400, 'Missing peer URL') return secret = Config.newToken()", "secret=secret, desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable to", "params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root' in params: trustee_root =", "'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified,", "datetime import time # /trust handlers # # GET /trust", "params: creator = params['creator'] else: creator = None except ValueError:", "!= 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403)", "if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): self.response.set_status(403) return isPeer", "or # peer secret) # Handling requests to trust/ class", "post(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "'' if 'approved' in params: if params['approved'] == True or", "baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def", "else: # Use of GET param peer=true is a way", "return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships", "import datetime import time # /trust handlers # # GET", "check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself:", "'id' in params: peerid = params['id'] else: peerid = ''", "auth.init_actingweb(appreq=self, id=id, path='trust') if not myself or check.response[\"code\"] != 200:", "that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret,", "myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself", "trust # relationship (see config.py for default relationship and auto-accept,", "if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id,", "# We allow non-approved peers to delete even if we", "= None if 'approved' in params: if params['approved'] and params['approved']", "relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\",", "approved = None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri", "as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params", "try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = 
None if 'approved'", "and check.trust.peerid == peerid: isPeer = True else: # Use", "self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests to /trust/*,", "in params: type = params['type'] else: type = '' if", "relationship, peerid) return logging.debug('GET trust headers: ' + str(self.request.headers)) (Config,", "desc = '' if 'approved' in params: if params['approved'] ==", "return isPeer = False if check.trust and check.trust.peerid == peerid:", "myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='GET'):", "type=type) if not relationships: self.response.set_status(404, 'Not found') return pairs =", "'trustee_root' in params: trustee_root = params['trustee_root'] else: trustee_root = ''", "if not deleted: self.response.set_status(502, 'Not able to delete relationship with", "def post(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self,", "or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT',", "if 'verify' in params: verificationToken = params['verify'] else: verificationToken =", "= params['desc'] except ValueError: url = self.request.get('url') relationship = self.request.get('relationship')", "200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return", "body to initiate a trust relationship between this # actor", "subpath=relationship, add_response=False) if not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>',", "method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id,", "peer) (auth: creator, admin, or # peer secret) # Handling", "and check.trust.peerid == peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken =", "check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8',", "content') def post(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self,", "= True else: approved = False # Since we received", "check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or", "params['creator'] else: creator = None except ValueError: self.response.set_status(400, 'No json", "} out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved:", "requests to specific relationships, e.g. 
/trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self,", "myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted:", "is the same as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403)", "self.response.set_status(404, 'Not found') return my_trust = relationships[0] if isPeer: deleted", "= params['url'] else: url = '' if 'relationship' in params:", "in params: trustee_root = params['trustee_root'] else: trustee_root = '' if", "new_trust.verified, 'type': new_trust.type, 'desc': new_trust.desc, 'secret': new_trust.secret, } out =", "actor from actingweb import config from actingweb import trust from", "= params['secret'] else: secret = '' if 'desc' in params:", "params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url' in params: url =", "== 'POST': self.post(id, relationship) return self.response.set_status(404, \"Not found\") def put(self,", "'No json content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok')", "of a peer # relationship even when requestor is not", "peerid = '' if 'type' in params: type = params['type']", "self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def put(self, id, relationship,", "relationship (auth: creator, admin, or peer secret) # POST /trust/{relationship}}/{actorid}", "# /trust handlers # # GET /trust with query parameters", "# admin, or peer secret) # DELETE /trust/{relationship}}/{actorid} to delete", "a GET to verify if check.trust and check.trust.peerid == peerid", "rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'), webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler, name='trustHandler'), ], debug=True)", "401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved peers to", "myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified':", "else: url = '' if 'relationship' in params: relationship =", "== 0: self.response.set_status(400, 'Missing mandatory attributes') return if Config.auto_accept_default_relationship and", "'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved': rel.approved,", "peer=true is a way of forcing no deletion of a", "True else: approved = None except ValueError: if not self.request.get('_method')", "= config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships:", "retrieve trust relationships (auth: only creator and admins allowed) #", "secret) # DELETE /trust/{relationship}}/{actorid} to delete a relationship (with #", "# Handling requests to /trust/*, e.g. 
/trust/friend class relationshipHandler(webapp2.RequestHandler): def", "no deletion of a peer # relationship even when requestor", "deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502, 'Not able", "+ new_trust.relationship + '/' + new_trust.peerid)) pair = { 'baseuri':", "peer_approved = None if 'approved' in params: if params['approved'] and", "/trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true if the", "return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404,", "baseuri = '' if self.request.get('desc') and len(self.request.get('desc')) > 0: desc", "relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if self.request.get('_method') == 'POST': self.post(id,", "the relationship yet if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False):", "# PUT /trust/{relationship}}/{actorid} with a json body to change details", "'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out", "json content') return if len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if", "import trust from actingweb import auth import webapp2 import os", "verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, } out =", "and len(self.request.get('desc')) > 0: desc = self.request.get('desc') else: desc =", "relationship even when requestor is not a peer (primarily for", "'desc' in params: desc = params['desc'] else: desc = ''", "if 'type' in params: type = params['type'] else: type =", "the same as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return", "in params: peerid = params['id'] else: peerid = '' if", "= self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid,", "if not relationships: self.response.set_status(404, 'Not found') return pairs = []", "'Created') else: self.response.set_status(202, 'Accepted') # Handling requests to specific relationships,", "= '' if 'approved' in params: if params['approved'] == True", "actingweb import config from actingweb import trust from actingweb import", "if len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204,", "able to delete relationship with peer.') return self.response.set_status(204, 'Ok') application", "# auth required) # GET /trust/{relationship}}/{actorid} to get details on", "== 'PUT': self.put(id, relationship, peerid) return if self.request.get('_method') == 'DELETE':", "if not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403)", "a trust relationship between this # actor and another (reciprocal", "new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid': new_trust.peerid,", "== \"true\": approved = True else: approved = None except", "type = params['type'] if 'desc' in params: desc = params['desc']", "not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret = '' desc =", "trust relationship between this # actor and another (reciprocal relationship)", "not self.request.get('_method') or self.request.get('_method') != \"PUT\": 
self.response.set_status(400, 'No json content')", "non-approved peers to delete even if we haven't approved the", "rel in relationships: pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid,", "'type' in params: type = params['type'] if 'desc' in params:", "no # auth required) # GET /trust/{relationship}}/{actorid} to get details", "peerid = params['id'] else: peerid = '' if 'type' in", "(auth: only creator and admins allowed) # POST /trust/{relationship} with", "if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship):", "0: self.response.set_status(400, 'Missing peer URL') return secret = Config.newToken() new_trust", "same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root')", "else: self.response.set_status(202, 'Accepted') # Handling requests to specific relationships, e.g.", "check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid):", "= True else: # Use of GET param peer=true is", "params: type = params['type'] if 'desc' in params: desc =", "> 0: if self.request.get('approved').lower() == \"true\": approved = True else:", "peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return my_trust =", "'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, } out", "0: if self.request.get('approved').lower() == \"true\": approved = True else: approved", "# relationship (see config.py for default relationship and auto-accept, no", "self.request.get('_method') == 'POST': self.post(id, relationship) return self.response.set_status(404, \"Not found\") def", "on a relationship (baseuri, secret, desc) (auth: creator, # admin,", "a peer about changes in the relationship # PUT /trust/{relationship}}/{actorid}", "if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved = True else:", "verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden')", "desc) (auth: creator, # admin, or peer secret) # DELETE", "actingweb import auth import webapp2 import os from google.appengine.ext.webapp import", "requests to /trust/*, e.g. 
/trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id,", "= webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'), webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler,", "and auto-accept, no # auth required) # GET /trust/{relationship}}/{actorid} to", "return if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved = True", "'No content') def post(self, id, relationship): (Config, myself, check) =", "delete is from the peer) (auth: creator, admin, or #", "try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url' in params: url", "self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship, peerid):", "+ '/trust/' + new_trust.relationship + '/' + new_trust.peerid)) pair =", "return # We allow non-approved peers to delete even if", "'' type = '' peerid = '' relationship = self.request.get('relationship')", "to trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') ==", "str(Config.root + myself.id + '/trust/' + new_trust.relationship + '/' +", "(reciprocal relationship) (auth: only creator and admins allowed) # POST", "trust from actingweb import auth import webapp2 import os from", "approved=False): self.response.set_status(403) return isPeer = False if check.trust and check.trust.peerid", "relationships[0] # If the peer did a GET to verify", "this # actor and another (reciprocal relationship) (auth: only creator", "isPeer = True Config = config.config() relationships = myself.getTrustRelationships( relationship=relationship,", "+ '/' + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id':", "in params: url = params['url'] else: url = '' if", "send information to a peer about changes in the relationship", "params['approved'].lower() == \"true\": approved = True else: approved = None", "if the delete is from the peer) (auth: creator, admin,", "params['trustee_root'] else: trustee_root = '' if 'creator' in params: creator", "'POST': self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust')", "with json body to initiate a trust relationship between this", "peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken", "peers to delete even if we haven't approved the relationship", "else: verificationToken = '' pair = { 'baseuri': my_trust.baseuri, 'id':", "a peer (primarily for testing purposes) peerGet = self.request.get('peer').lower() if", "relationship between this # actor and another (reciprocal relationship) (auth:", "self.delete(id, relationship, peerid) return logging.debug('GET trust headers: ' + str(self.request.headers))", "'' pair = { 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid,", "= [] for rel in relationships: pairs.append({ 'baseuri': rel.baseuri, 'id':", "self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if", "== \"true\": approved = True else: approved = None if", "out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') #", "POST /trust/{relationship} with json body to create new trust #", "return if not 
check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return try:", "desc = params['desc'] except ValueError: url = self.request.get('url') relationship =", "self.request.get('approved') and len(self.request.get('approved')) > 0: if self.request.get('approved').lower() == \"true\": approved", "None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri')", "= '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok')", "'type': new_trust.type, 'desc': new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair)", "params['approved'] and params['approved'] == True: peer_approved = True except ValueError:", "POST /trust/{relationship}}/{actorid} to send information to a peer about changes", "peerid) return if self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid) return", "return if self.request.get('approved') and len(self.request.get('approved')) > 0: if self.request.get('approved').lower() ==", "new_trust.approved, 'peer_approved': new_trust.peer_approved, 'verified': new_trust.verified, 'type': new_trust.type, 'desc': new_trust.desc, 'secret':", "peerid) to retrieve trust relationships (auth: only creator and admins", "self.response.set_status(400, 'No json content') return if len(trustee_root) > 0: myself.setProperty('trustee_root',", "'peerid': new_trust.peerid, 'relationship': new_trust.relationship, 'approved': new_trust.approved, 'peer_approved': new_trust.peer_approved, 'verified': new_trust.verified,", "peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True", "= '' type = '' peerid = '' relationship =", "new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] =", "if params['approved'] and params['approved'] == True: peer_approved = True except", "check.trust.peerid == peerid: isPeer = True else: # Use of", "peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken,", "peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def put(self,", "'ignore')) if 'baseuri' in params: baseuri = params['baseuri'] else: baseuri", "relationship, peerid): if self.request.get('_method') == 'PUT': self.put(id, relationship, peerid) return", "type = self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship,", "params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params: baseuri =", "and admins allowed) # POST /trust with json body to", "desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root", "# relationship even when requestor is not a peer (primarily", "relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] if isPeer:", "> 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content')", "relationships, e.g. 
#!/usr/bin/env python
#
from actingweb import actor
from actingweb import config
from actingweb import auth

import webapp2
import os
from google.appengine.ext.webapp import template
import json
import logging
import datetime
import time

# /trust handlers
#
# GET /trust with query parameters (relationship, type, and peerid) to retrieve trust
#    relationships (auth: only creator and admins allowed)
# POST /trust with json body to initiate a trust relationship between this
#    actor and another (reciprocal relationship) (auth: only creator and admins allowed)
# POST /trust/{relationship} with json body to create new trust
#    relationship (see config.py for default relationship and auto-accept, no
#    auth required)
# GET /trust/{relationship}/{actorid} to get details on a specific relationship
#    (auth: creator, admin, or peer secret)
# POST /trust/{relationship}/{actorid} to send information to a peer about changes
#    in the relationship
# PUT /trust/{relationship}/{actorid} with a json body to change details on a
#    relationship (baseuri, secret, desc) (auth: creator, admin, or peer secret)
# DELETE /trust/{relationship}/{actorid} to delete a relationship (with
#    ?peer=true if the delete is from the peer) (auth: creator, admin, or peer secret)


# Handling requests to trust/
class rootHandler(webapp2.RequestHandler):

    def get(self, id):
        if self.request.get('_method') == 'POST':
            self.post(id)
            return
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust')
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', method='GET'):
            self.response.set_status(403)
            return
        relationship = self.request.get('relationship')
        type = self.request.get('type')
        peerid = self.request.get('peerid')
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid, type=type)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        pairs = []
        for rel in relationships:
            pairs.append({
                'baseuri': rel.baseuri,
                'id': myself.id,
                'peerid': rel.peerid,
                'relationship': rel.relationship,
                'approved': rel.approved,
                'peer_approved': rel.peer_approved,
                'verified': rel.verified,
                'type': rel.type,
                'desc': rel.desc,
                'secret': rel.secret,
            })
        out = json.dumps(pairs)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        self.response.set_status(200, 'Ok')

    def post(self, id):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust')
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', method='POST'):
            self.response.set_status(403)
            return
        secret = ''
        desc = ''
        relationship = Config.default_relationship
        type = ''
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'url' in params:
                url = params['url']
            else:
                url = ''
            if 'relationship' in params:
                relationship = params['relationship']
            if 'type' in params:
                type = params['type']
            if 'desc' in params:
                desc = params['desc']
        except ValueError:
            url = self.request.get('url')
            relationship = self.request.get('relationship')
            type = self.request.get('type')
        if len(url) == 0:
            self.response.set_status(400, 'Missing peer URL')
            return
        secret = Config.newToken()
        new_trust = myself.createReciprocalTrust(
            url=url, secret=secret, desc=desc, relationship=relationship, type=type)
        if not new_trust:
            self.response.set_status(408, 'Unable to create trust relationship')
            return
        self.response.headers.add_header(
            "Location", str(Config.root + myself.id + '/trust/' +
                            new_trust.relationship + '/' + new_trust.peerid))
        pair = {
            'baseuri': new_trust.baseuri,
            'id': myself.id,
            'peerid': new_trust.peerid,
            'relationship': new_trust.relationship,
            'approved': new_trust.approved,
            'peer_approved': new_trust.peer_approved,
            'verified': new_trust.verified,
            'type': new_trust.type,
            'desc': new_trust.desc,
            'secret': new_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        self.response.set_status(201, 'Created')


# Handling requests to /trust/*, e.g. /trust/friend
class relationshipHandler(webapp2.RequestHandler):

    def get(self, id, relationship):
        if self.request.get('_method') == 'POST':
            self.post(id, relationship)
            return
        self.response.set_status(404, "Not found")

    def put(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(
            appreq=self, id=id, path='trust', subpath=relationship, add_response=False)
        if not myself:
            return
        if relationship != 'trustee':
            self.response.set_status(404, "Not found")
            return
        # Access is the same as /trust
        if not check.checkAuthorisation(path='trust', method='POST'):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'trustee_root' in params:
                trustee_root = params['trustee_root']
            else:
                trustee_root = ''
            if 'creator' in params:
                creator = params['creator']
            else:
                creator = None
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if len(trustee_root) > 0:
            myself.setProperty('trustee_root', trustee_root)
        if creator:
            myself.modify(creator=creator)
        self.response.set_status(204, 'No content')

    def delete(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(
            appreq=self, id=id, path='trust', subpath=relationship, add_response=False)
        if not myself:
            return
        # Access is the same as /trust
        if not check.checkAuthorisation(path='trust', method='DELETE'):
            self.response.set_status(403)
            return
        if relationship != 'trustee':
            self.response.set_status(404, "Not found")
            return
        myself.deleteProperty('trustee_root')
        self.response.set_status(204, 'No content')

    def post(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(
            appreq=self, id=id, path='trust', subpath=relationship, add_response=False)
        if not myself or (check.response["code"] != 200 and check.response["code"] != 401):
            auth.add_auth_response(appreq=self, auth_obj=check)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'baseuri' in params:
                baseuri = params['baseuri']
            else:
                baseuri = ''
            if 'id' in params:
                peerid = params['id']
            else:
                peerid = ''
            if 'type' in params:
                type = params['type']
            else:
                type = ''
            if 'secret' in params:
                secret = params['secret']
            else:
                secret = ''
            if 'desc' in params:
                desc = params['desc']
            else:
                desc = ''
            if 'verify' in params:
                verificationToken = params['verify']
            else:
                verificationToken = None
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if len(baseuri) == 0 or len(peerid) == 0 or len(type) == 0:
            self.response.set_status(400, 'Missing mandatory attributes')
            return
        if Config.auto_accept_default_relationship and Config.default_relationship == relationship:
            approved = True
        else:
            approved = False
        # Since we received a request for a relationship, assume that peer has approved
        new_trust = myself.createVerifiedTrust(
            baseuri=baseuri, peerid=peerid, approved=approved, secret=secret,
            verificationToken=verificationToken, type=type, peer_approved=True,
            relationship=relationship, desc=desc)
        if not new_trust:
            self.response.set_status(403, 'Forbidden')
            return
        self.response.headers.add_header(
            "Location", str(Config.root + myself.id + '/trust/' +
                            new_trust.relationship + "/" + new_trust.peerid))
        pair = {
            'baseuri': new_trust.baseuri,
            'id': myself.id,
            'peerid': new_trust.peerid,
            'relationship': new_trust.relationship,
            'approved': new_trust.approved,
            'peer_approved': new_trust.peer_approved,
            'verified': new_trust.verified,
            'type': new_trust.type,
            'desc': new_trust.desc,
            'secret': new_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        if new_trust.approved:
            self.response.set_status(201, 'Created')
        else:
            self.response.set_status(202, 'Accepted')


# Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd
class trustHandler(webapp2.RequestHandler):

    def get(self, id, relationship, peerid):
        if self.request.get('_method') == 'PUT':
            self.put(id, relationship, peerid)
            return
        if self.request.get('_method') == 'DELETE':
            self.delete(id, relationship, peerid)
            return
        logging.debug('GET ' + str(self.request.headers))
        (Config, myself, check) = auth.init_actingweb(
            appreq=self, id=id, path='trust', subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid):
            self.response.set_status(403)
            return
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        my_trust = relationships[0]
        # If the peer did a GET on the relationship, we can mark it as verified
        if check.trust and check.trust.peerid == peerid and not my_trust.verified:
            my_trust.modify(verified=True)
            verificationToken = my_trust.verificationToken
        else:
            verificationToken = ''
        pair = {
            'baseuri': my_trust.baseuri,
            'id': myself.id,
            'peerid': my_trust.peerid,
            'relationship': my_trust.relationship,
            'approved': my_trust.approved,
            'peer_approved': my_trust.peer_approved,
            'verified': my_trust.verified,
            'verificationToken': verificationToken,
            'type': my_trust.type,
            'desc': my_trust.desc,
            'secret': my_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        if my_trust.approved:
            self.response.set_status(200, 'Ok')
        else:
            self.response.set_status(202, 'Accepted')

    def post(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(
            appreq=self, id=id, path='trust', subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            peer_approved = None
            if 'approved' in params:
                if params['approved'] == True or params['approved'].lower() == "true":
                    peer_approved = True
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved):
            self.response.set_status(204, 'Ok')
        else:
            self.response.set_status(405, 'Not modified')

    def put(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(
            appreq=self, id=id, path='trust', subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid):
            self.response.set_status(403)
            return
        baseuri = ''
        desc = ''
        approved = None
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'baseuri' in params:
                baseuri = params['baseuri']
            else:
                baseuri = ''
            if 'desc' in params:
                desc = params['desc']
            else:
                desc = ''
            if 'approved' in params:
                if params['approved'] == True or params['approved'].lower() == "true":
                    approved = True
                else:
                    approved = None
        except ValueError:
            if not self.request.get('_method') or self.request.get('_method') != "PUT":
                self.response.set_status(400, 'No json content')
                return
            if self.request.get('approved') and len(self.request.get('approved')) > 0:
                if self.request.get('approved').lower() == "true":
                    approved = True
                else:
                    approved = None
            if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0:
                baseuri = self.request.get('baseuri')
            else:
                baseuri = ''
            if self.request.get('desc') and len(self.request.get('desc')) > 0:
                desc = self.request.get('desc')
            else:
                desc = ''
        if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid,
                                       baseuri=baseuri, approved=approved, desc=desc):
            self.response.set_status(204, 'Ok')
        else:
            self.response.set_status(405, 'Not modified')

    def delete(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(
            appreq=self, id=id, path='trust', subpath=relationship, add_response=False)
        if not myself or (check.response["code"] != 200 and check.response["code"] != 401):
            auth.add_auth_response(appreq=self, auth_obj=check)
            return
        # We allow non-approved peers to delete even if we haven't approved the relationship yet
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE',
                                        peerid=peerid, approved=False):
            self.response.set_status(403)
            return
        isPeer = False
        if check.trust and check.trust.peerid == peerid:
            isPeer = True
        else:
            # Use of GET param peer=true is a way of forcing no deletion of a peer
            # relationship when requestor is not a peer (primarily for testing purposes)
            peerGet = self.request.get('peer').lower()
            if peerGet == "true":
                isPeer = True
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        if isPeer:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False)
        else:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True)
        if not deleted:
            self.response.set_status(502, 'Not able to delete relationship with peer.')
            return
        self.response.set_status(204, 'Ok')


application = webapp2.WSGIApplication([
    webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'),
    webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'),
    webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler, name='trustHandler'),
], debug=True)
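# --- Illustrative client sketch (not part of the handlers above) ---
# A minimal example of how the /trust API documented at the top of this file could be
# driven from a client, under stated assumptions: the `requests` library, the
# `actor_root` URL, the `creator_auth` credentials and the 'friend' relationship name
# are placeholders chosen for illustration only, and the actual peer-to-peer exchange
# is performed internally by createReciprocalTrust()/createVerifiedTrust().
import requests


def example_initiate_trust(actor_root, creator_auth, peer_url):
    """Ask our actor (as creator) to initiate a reciprocal 'friend' trust with a peer actor."""
    resp = requests.post(actor_root + '/trust',
                         json={'url': peer_url, 'relationship': 'friend', 'desc': 'example'},
                         auth=creator_auth)
    resp.raise_for_status()
    # The returned JSON mirrors the trust object, including 'approved' and 'peer_approved' flags.
    return resp.json()


def example_approve_trust(actor_root, creator_auth, trust):
    """Approve a pending relationship on our side with PUT /trust/{relationship}/{peerid}."""
    resp = requests.put('{0}/trust/{1}/{2}'.format(actor_root, trust['relationship'], trust['peerid']),
                        json={'approved': True},
                        auth=creator_auth)
    # The handler replies 204 on success and 405 if nothing was modified.
    return resp.status_code == 204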
[ "strategy for 3D lines that go past the camera. Formerly", "prediction data in python. - Skyborg \"\"\", '1.1.3': \"\"\" -", "Skyborg - Java bots will now shut down when the", "to another team in the GUI no longer breaks the", "in python. - Skyborg - Java bots will now shut", "the position, velocity, etc of the ball and the cars!", "preset loading to allow multiple agents to saved/loaded correctly if", "for Rocket League patch 1.50. Compared to previous version: -", "(_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001", "by real controller input. - Creating new presets in the", "to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket", "auto-running java bots during tournaments. To take advantage of this", "You can now play on Salty Shores thanks to hallo_doei", "errors. - Boost amount for cars will now round up", "the cars! This can be a great help during bot", "game tick data during replays and the postgame. - tarehart", "way to access ball prediction data in python. - Skyborg", "has been decreased again because many people experienced errors related", "Fix for items with a ':' not showing up in", "pad data is missing - Loadout configuration is broken Thanks", "- ccman32 and hallo-doei - Fixed bug resulting in incorrect", "Giving specific error messages when cfg files are messed up.", "https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster way to access ball", "ball prediction data in python. - Skyborg - Java bots", "inputs are received for each player index when you press", "integer, so 0.3% boost will now appear as 1 instead", "typo in rlbot.cfg - Redox - Fancy release notes -", "provide a list of future ball positions based on chip's", "to hallo_doei - Bug fix for people with spaces in", "files with relative paths for agents. Fixed agent preset loading", "tarehart Bonus: - You can now play on Salty Shores", "Other changes: - The loadout config for orange team is", "This means you can draw a lot of lines at", "was found. Updated GUI to launch Rocket League when clicking", "that inputs are received for each player index when you", "for GUI not saving correct path - hallo-doei - Fix", "140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge cancels", "manager - Eastvillage - Fix for items with a ':'", "the \"Run\" button twice in a row in the GUI", "is missing - Boost pad data is missing - Loadout", "It's a lower-level representation of physics data which updates at", "patch 1.50. Compared to previous version: - Dropshot tile data", "not hoops or dropshot. Documentation and examples can be found", "Rocket League is patched - ccman32 and hallo-doei - Fixed", "broke dodges for our bots; this update fixes it. \"\"\",", "is now respected again. - ccman32 - Fixed a bug", "dropshot mode. It was reporting 2 goals rather than the", "when trying to write to stderr. - Dragging bots to", "play on Salty Shores thanks to hallo_doei - Bug fix", "double the original. \"\"\", '1.0.4': \"\"\" - Maximum size for", "where bots would dodge when they intended to double jump.", "effect, you can no longer see up-to-date player data during", "many people experienced errors related to memory access. The limit", "of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes:", "- You can now play on Salty Shores thanks to", "You can still make a great bot without it, but", "fix quickly! 
\"\"\", '0.0.31': \"\"\" Rapid response to Rocket League", "now only double the original. \"\"\", '1.0.4': \"\"\" - Maximum", "- Bug fix for people with spaces in their file", "correct path - hallo-doei - Fix for GUI crash when", "'1.1.1': \"\"\" You can now get information about the ball's", "_ 10100 | ___ \\ | | ___ \\ |", "in Dropshot mode thanks to hallo_doei! Read all about it", "New core dll that is less likely to break when", "bug fixes: - Fixed a bug where auto-run executables would", "'1.1.3': \"\"\" - Faster way to access ball prediction data", "Formerly it was \"draw it, even though it's crazy sometimes\",", "fix for Rocket League patch 1.50. Compared to previous version:", "when cfg files are messed up. \"\"\", '1.2.2': \"\"\" -", "received for each player index when you press the [home]", "avoid buggy situations. - Shutting down the python framework will", "or dropshot. Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction", "\"\"\", '0.0.30': \"\"\" - New core dll that is less", "when no default RLBot.cfg file was found. Updated GUI to", "based on chip's excellent physics modeling. Take advantage of this", "- Fancy release notes - tarehart and Skyborg \"\"\" }", "no longer see up-to-date player data during instant replays. \"\"\",", "RUN button behavior in the GUI would not work after", "to kill bots twice in a row. - Clicking on", "Fixed bug resulting in incorrect quickchat - dtracers - Added", "row. - Clicking on the \"Run\" button twice in a", "- hallo-doei - Fix for GUI not saving correct path", "\"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops and Dropshot", "trying to write to stderr. - Dragging bots to another", "data in python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster", "the rate that inputs are received for each player index", "the postgame. - tarehart - Fixed a bug where bots", "we DO have two great new features! 1. Setting game", "\"\"\", '1.1.1': \"\"\" You can now get information about the", "once without getting errors. - Boost amount for cars will", "- ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running java", "boost will now appear as 1 instead of 0. -", "1) we don't load dependencies by storing it in __init__.py", "\"\"\" Maximum size for a render message has been decreased", "then canceling - hallo-doei - Adding file checking before injection", "the GUI would not work after killing bots. \"\"\", '1.2.0':", "GUI would not work after killing bots. \"\"\", '1.2.0': \"\"\"", "behavior in the GUI would not work after killing bots.", "they have the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add", "Rocket League patch broke dodges for our bots; this update", "crash when trying to write to stderr. - Dragging bots", "fix quickly. We will follow this up with a proper", "ask for instructions on discord. \"\"\", '0.0.30': \"\"\" - New", "This can be a great help during bot development, and", "this short-term fix quickly. We will follow this up with", "it was \"draw it, even though it's crazy sometimes\", now", "- Loadout configuration is fixed Thanks to ccman32 and dtracers", "all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout", "ends. As a side effect, you can no longer see", "# Store the version here so: # 1) we don't", "are only accurate on the standard arena, not hoops or", "we can import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package", "agents. 
Fixed agent preset loading to allow multiple agents to", "- dtracers - Added more built-in colors to the python", "storing it in __init__.py # 2) we can import it", "saving preset then canceling - hallo-doei - Adding file checking", "Got rid of the libpng warning seen when using the", "support for auto starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed", "also get creative with it. Visit the wiki for details", "access. The limit is now only double the original. \"\"\",", "'1.2.6': \"\"\" Fixed a bug where field info was not", "Rocket League 1.49 and RLBot 0.0.30, ask for instructions on", "would dodge when they intended to double jump. -tarehart \"\"\",", "key. Toggle back off with the [end] key. - Fixed", "when loading certain RLBot config files with relative paths for", "have_internet helper function to help streamline upgrade checks. - ima9rd", "side effect, you can no longer see up-to-date player data", "quickly! \"\"\", '0.0.31': \"\"\" Rapid response to Rocket League patch", "of this to do next-level wall reads, catches, and dribbles!", "can still make a great bot without it, but this", "path by Zaptive - Subprocess agent for future Rust support", "Adding support for auto-running java bots during tournaments. To take", "of the ball and the cars! This can be a", "dodge cancels / half flips! - ccman32 * *************************************************** Plus:", "instant replays. \"\"\", '1.0.3': \"\"\" Time for the big 1.0", "with a ':' not showing up in the GUI -", "in their file path by Zaptive - Subprocess agent for", "- Giving specific error messages when cfg files are messed", "only accurate on the standard arena, not hoops or dropshot.", "features! 1. Setting game state. You can manipulate the position,", "helper function to help streamline upgrade checks. - ima9rd \"\"\",", "wall reads, catches, and dribbles! You can read about the", "ball's status in Dropshot mode thanks to hallo_doei! Read all", "effectively. - Fixed bug where RUN button behavior in the", "was reporting 2 goals rather than the expected 140. \"\"\",", "'1.5.0': \"\"\" Adding a have_internet helper function to help streamline", "\"\"\", '1.0.3': \"\"\" Time for the big 1.0 release! We", "path - hallo-doei - Fix for GUI crash when saving", "GUI no longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate", "to access ball prediction data in python. - Skyborg -", "offer a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation", "has been increased by a factor of 100. This means", "- Got rid of the libpng warning seen when using", "clicking run if no Rocket League process is found. -", "cancels / half flips! - ccman32 * *************************************************** Plus: -", "currently the wall bounces are only accurate on the standard", "the GUI works better now. - Got rid of the", "was not extracted properly during dropshot mode. It was reporting", "Dropshot tile data is fixed - Boost pad data is", "/| | | ___ \\/ _ \\| __| 01101100 010010", "great help during bot development, and you can also get", "ball and the cars! This can be a great help", "game state. You can manipulate the position, velocity, etc of", "GUI works better now. - Got rid of the libpng", "see up-to-date player data during instant replays. 
\"\"\", '1.0.3': \"\"\"", "in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed", "Store the version here so: # 1) we don't load", "import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ =", "reads, catches, and dribbles! You can read about the math", "'1.4.2': \"\"\" Adding support for auto-running java bots during tournaments.", "'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation of physics", "for a render message has been increased by a factor", "actually left \"beta\" a long time ago so this isn't", "though it's crazy sometimes\", now it will be \"don't draw", "- Fix for GUI crash when saving preset then canceling", "configs more effectively. - Fixed bug where RUN button behavior", "'1.0.6': \"\"\" The latest Rocket League patch broke dodges for", "no default RLBot.cfg file was found. Updated GUI to launch", "'1.2.5': \"\"\" *************************************************** * Fix for dodge cancels / half", "update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for a", "before injection (Resolves #167) - Redox - Fixed typo in", "|_/ / | | |_/ / ___ | |_ 110011", "ball prediction data in python. - Skyborg \"\"\", '1.1.3': \"\"\"", "etc of the ball and the cars! This can be", "/ half flips! - ccman32 * *************************************************** Plus: - Changing", "no longer attempt to kill bots twice in a row.", "loadout config for orange team is now respected again. -", "processes. \"\"\", '1.1.2': \"\"\" Faster way to access ball prediction", "for delivering this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response", "- Faster way to access ball prediction data in python.", "the python framework will no longer attempt to kill bots", "- Java bots will now shut down when the python", "extracted properly during dropshot mode. It was reporting 2 goals", "is fixed Thanks to ccman32 and dtracers for delivering this", "incorrect quickchat - dtracers - Added more built-in colors to", "with the [end] key. - Fixed a bug where party_member_bot", "ccman32 and hallo-doei - Fixed bug resulting in incorrect quickchat", "agent for future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\"", "\"\"\", '1.0.6': \"\"\" The latest Rocket League patch broke dodges", "GUI crash when loading certain RLBot config files with relative", "the version here so: # 1) we don't load dependencies", "default RLBot.cfg file was found. Updated GUI to launch Rocket", "executables would crash when trying to write to stderr. -", "Setting game state. You can manipulate the position, velocity, etc", "Salty Shores thanks to hallo_doei - Bug fix for people", "Fixed a crash that would commonly happen after a match", "match ends. As a side effect, you can no longer", "GUI when no default RLBot.cfg file was found. Updated GUI", "read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the", "checks. - ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running", "list of future ball positions based on chip's excellent physics", "and dribbles! You can read about the math involved here:", "stay on Rocket League 1.49 and RLBot 0.0.30, ask for", "module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = {", "- tarehart - Fixed a bug where bots would dodge", "shut down when the python framework quits. 
This has been", "following known issues: - Dropshot tile data is missing -", "of 0. - Fixed a crash that would commonly happen", "\"\"\", '1.2.2': \"\"\" - Rearranged the GUI a bit, and", "up in the GUI - hallo-doei - Fix for GUI", "it. Visit the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State", "when using the GUI. - Giving specific error messages when", "items with a ':' not showing up in the GUI", "Loadout configuration is broken Thanks to ccman32 and dtracers for", "missing - Boost pad data is missing - Loadout configuration", "so 0.3% boost will now appear as 1 instead of", "during bot development, and you can also get creative with", "creative with it. Visit the wiki for details and documentation", "version: - Dropshot tile data is fixed - Boost pad", "party_member_bot could get influenced by real controller input. - Creating", "file checking before injection (Resolves #167) - Redox - Fixed", "'1.3.0': \"\"\" Accurate ball prediction for Hoops and Dropshot modes!", "seen when using the GUI. - Giving specific error messages", "to interpolation. You can still make a great bot without", "| ___ \\ | | ___ \\ | | 00101", "- Eastvillage - Fix for items with a ':' not", "Fix for dodge cancels / half flips! - ccman32 *", "/ (_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__|", "__version__ in release_notes: return release_notes[__version__] return '' def get_help_text(): return", "to memory access. The limit is now only double the", "bots. \"\"\", '1.2.0': \"\"\" - We now offer a 'RigidBodyTick'", "will now round up to the nearest integer, so 0.3%", "by a factor of 100. This means you can draw", "League 1.49 and RLBot 0.0.30, ask for instructions on discord.", "data is missing - Boost pad data is missing -", "cfg files are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged", "patch 1.50 with the following known issues: - Dropshot tile", "tarehart and Skyborg \"\"\" } release_banner = \"\"\" ______ _", "tick data during replays and the postgame. - tarehart -", "because many people experienced errors related to memory access. The", "release_notes: return release_notes[__version__] return '' def get_help_text(): return \"Trouble? Ask", "no longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate ball", "to avoid buggy situations. - Shutting down the python framework", "of the libpng warning seen when using the GUI. -", "fixes: - Fixed a bug where auto-run executables would crash", "hallo-doei - Fix for GUI not saving correct path -", "can import it in setup.py for the same reason #", "- Fixed a bug where bots would dodge when they", "the [end] key. - Fixed a bug where party_member_bot could", "arena, not hoops or dropshot. Documentation and examples can be", "[home] key. Toggle back off with the [end] key. -", "https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are only accurate on", "to ccman32 and dtracers for delivering this short-term fix quickly.", "when they intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\"", "is now only double the original. \"\"\", '1.0.4': \"\"\" -", "future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive", "intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest", "rendering manager - Eastvillage - Fix for items with a", "for future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More", "the GUI. 
- Giving specific error messages when cfg files", "ccman32 - Fixed a bug where the GUI would crash", "ccman32, and tarehart 2. Ball prediction. We now provide a", "great bot without it, but this feature is quite nice", "Compared to previous version: - Dropshot tile data is fixed", "Take advantage of this to do next-level wall reads, catches,", "data in python. - Skyborg \"\"\", '1.1.1': \"\"\" You can", "a row. - Clicking on the \"Run\" button twice in", "appearance configs more effectively. - Fixed bug where RUN button", "\"\"\", '1.6.0':\"\"\" Add support for auto starting .NET executables. \"\"\",", "work after killing bots. \"\"\", '1.2.0': \"\"\" - We now", "still make a great bot without it, but this feature", "postgame. - tarehart - Fixed a bug where bots would", "as 1 instead of 0. - Fixed a crash that", "original. \"\"\", '1.0.4': \"\"\" - Maximum size for a render", "instructions on discord. \"\"\", '0.0.30': \"\"\" - New core dll", "wall bounces are only accurate on the standard arena, not", "- hallo_doei - Avoiding and suppressing some game crashes, and", "past the camera. Formerly it was \"draw it, even though", "would not work after killing bots. \"\"\", '1.2.0': \"\"\" -", "access ball prediction data in python. - Skyborg - Java", "To take advantage of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java", "not work after killing bots. \"\"\", '1.2.0': \"\"\" - We", "after killing bots. \"\"\", '1.2.0': \"\"\" - We now offer", "development, and you can also get creative with it. Visit", "rate that inputs are received for each player index when", "injection (Resolves #167) - Redox - Fixed typo in rlbot.cfg", "it, even though it's crazy sometimes\", now it will be", "isn't as big a milestone as the number implies, but", "the GUI would crash with a \"KeyError\". - hallo_doei -", "___ \\ | | ___ \\ | | 00101 110011", "\" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes():", "correctly if they have the same name. - ima9rd \"\"\",", "configuration is broken Thanks to ccman32 and dtracers for delivering", "Skyborg \"\"\", '1.1.3': \"\"\" - Faster way to access ball", "_ \\| __| 01101100 010010 | |\\ \\| |____| |_/", "/ ___ | |_ 110011 00110110 | /| | |", "ccman32 * *************************************************** Plus: - Changing the rendering strategy for", "config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops and", "by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix for Rocket", "thanks to hallo_doei - Bug fix for people with spaces", "at once without getting errors. - Boost amount for cars", "showing up in the GUI - hallo-doei - Fix for", "# 1) we don't load dependencies by storing it in", "some game crashes, and also restoring the ability to get", "\"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner) print(\"Version", "https://discord.gg/5cNbXgG \" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def", "dribbles! You can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/", "new presets in the GUI works better now. - Got", "the [home] key. Toggle back off with the [end] key.", "been necessary recently to avoid buggy situations. 
- Shutting down", "in incorrect quickchat - dtracers - Added more built-in colors", "follow this up with a proper fix as soon as", "- tarehart and Skyborg \"\"\" } release_banner = \"\"\" ______", "memory access. The limit is now only double the original.", "getting errors. - Boost amount for cars will now round", "| |_ 110011 00110110 | /| | | ___ \\/", "situations. - Shutting down the python framework will no longer", "'1.0.3': \"\"\" Time for the big 1.0 release! We actually", "so: # 1) we don't load dependencies by storing it", "increased by a factor of 100. This means you can", "is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet", "but this feature is quite nice for the scientists among", "warning seen when using the GUI. - Giving specific error", "bots to another team in the GUI no longer breaks", "Ball prediction. We now provide a list of future ball", "- Creating new presets in the GUI works better now.", "League is patched - ccman32 and hallo-doei - Fixed bug", "when Rocket League is patched - ccman32 and hallo-doei -", "Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix", "of physics data which updates at 120Hz and is not", "when saving preset then canceling - hallo-doei - Adding file", "with a proper fix as soon as possible. You may", "'1.2.0': \"\"\" - We now offer a 'RigidBodyTick' thanks to", "ccman32 and dtracers for delivering this short-term fix quickly. We", "and Dropshot modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\",", "to help streamline upgrade checks. - ima9rd \"\"\", '1.4.2': \"\"\"", "- Boost amount for cars will now round up to", "120Hz and is not subject to interpolation. You can still", "for a render message has been decreased again because many", "delivering this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response to", "Hoops and Dropshot modes! - Kipje13, Marvin, NeverCast, et. al.", "python. - Skyborg \"\"\", '1.1.1': \"\"\" You can now get", "Rapid response to Rocket League patch 1.50 with the following", "110011 | |_/ / | | |_/ / ___ |", "canceling - hallo-doei - Adding file checking before injection (Resolves", "for the big 1.0 release! We actually left \"beta\" a", "great new features! 1. Setting game state. You can manipulate", "to previous version: - Dropshot tile data is fixed -", "0. - Fixed a crash that would commonly happen after", "config files with relative paths for agents. Fixed agent preset", "can no longer see up-to-date player data during instant replays.", "data in python. - Skyborg - Java bots will now", "League patch 1.50. Compared to previous version: - Dropshot tile", "load dependencies by storing it in __init__.py # 2) we", "works better now. - Got rid of the libpng warning", "of lines at once without getting errors. - Boost amount", "is less likely to break when Rocket League is patched", "01001 \"\"\" def get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__]", "- Skyborg - Java bots will now shut down when", "issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner) print(\"Version {}\".format(__version__)) print(get_current_release_notes()) print(get_help_text())", "in a row in the GUI will no longer spawn", "The loadout config for orange team is now respected again.", "fixed - Loadout configuration is fixed Thanks to ccman32 and", "the python rendering manager - Eastvillage - Fix for items", "will no longer attempt to kill bots twice in a", "buggy situations. 
- Shutting down the python framework will no", "fixed Thanks to ccman32 and dtracers for delivering this fix", "\"\"\" Adding support for auto-running java bots during tournaments. To", "mode thanks to hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot", "Fix for GUI not saving correct path - hallo-doei -", "League patch broke dodges for our bots; this update fixes", "nearest integer, so 0.3% boost will now appear as 1", "now round up to the nearest integer, so 0.3% boost", "expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge", "- Boost pad data is missing - Loadout configuration is", "- Dropshot tile data is fixed - Boost pad data", "a bit, and made it load and track appearance configs", "jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket League patch", "this update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for", "Code written by hallo_doei, ccman32, and tarehart 2. Ball prediction.", "Eastvillage - Fix for items with a ':' not showing", "thanks to whatisaphone! It's a lower-level representation of physics data", "have the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support", "at https://discord.gg/5cNbXgG \" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\"", "which updates at 120Hz and is not subject to interpolation.", "tile data is fixed - Boost pad data is fixed", "built-in colors to the python rendering manager - Eastvillage -", "physics modeling. Take advantage of this to do next-level wall", "hoops or dropshot. Documentation and examples can be found here:", "in release_notes: return release_notes[__version__] return '' def get_help_text(): return \"Trouble?", "- hallo-doei - Fix for GUI crash when saving preset", "League patch 1.50 with the following known issues: - Dropshot", "Rocket League when clicking run if no Rocket League process", "not subject to interpolation. You can still make a great", "\"\"\" - Rearranged the GUI a bit, and made it", "updates at 120Hz and is not subject to interpolation. You", "modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\"", "discord. \"\"\", '0.0.30': \"\"\" - New core dll that is", "this isn't as big a milestone as the number implies,", "the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces", "| 00101 110011 | |_/ / | | |_/ /", "choose to stay on Rocket League 1.49 and RLBot 0.0.30,", "quickly. We will follow this up with a proper fix", "in python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster way", "sometimes\", now it will be \"don't draw it\". - Showing", "at 120Hz and is not subject to interpolation. You can", "*************************************************** * Fix for dodge cancels / half flips! -", "} release_banner = \"\"\" ______ _ ______ _ 10100 |", "reporting 2 goals rather than the expected 140. \"\"\", '1.2.5':", "the following known issues: - Dropshot tile data is missing", "Accurate ball prediction for Hoops and Dropshot modes! - Kipje13,", "Dropshot tile data is missing - Boost pad data is", "\"\"\" Fixed GUI crash when loading certain RLBot config files", "found. Updated GUI to launch Rocket League when clicking run", "\"\"\" Time for the big 1.0 release! We actually left", "it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1'", "reason # 3) we can import it into your module", "a have_internet helper function to help streamline upgrade checks. 
-", "files are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged the", "You may also choose to stay on Rocket League 1.49", "a side effect, you can no longer see up-to-date player", "GUI. - Giving specific error messages when cfg files are", "on discord. \"\"\", '0.0.30': \"\"\" - New core dll that", "the GUI will no longer spawn duplicate processes. \"\"\", '1.1.2':", "message has been decreased again because many people experienced errors", "can draw a lot of lines at once without getting", "where RUN button behavior in the GUI would not work", "\"\"\", '0.0.31': \"\"\" Rapid response to Rocket League patch 1.50", "input. - Creating new presets in the GUI works better", "the GUI no longer breaks the config. \"\"\", '1.3.0': \"\"\"", "been increased by a factor of 100. This means you", "can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and", "if they have the same name. - ima9rd \"\"\", '1.6.0':\"\"\"", "Fixed a bug where party_member_bot could get influenced by real", "and track appearance configs more effectively. - Fixed bug where", "version here so: # 1) we don't load dependencies by", "Shores thanks to hallo_doei - Bug fix for people with", "'1.6.1': \"\"\" Fixed GUI crash when loading certain RLBot config", "- Fixed bug resulting in incorrect quickchat - dtracers -", "1.50. Compared to previous version: - Dropshot tile data is", "process is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a", "| | ___ \\/ _ \\| __| 01101100 010010 |", "the rendering strategy for 3D lines that go past the", "config for orange team is now respected again. - ccman32", "this to do next-level wall reads, catches, and dribbles! You", "data which updates at 120Hz and is not subject to", "amount for cars will now round up to the nearest", "| |\\ \\| |____| |_/ / (_) | |_ 010010", "a great bot without it, but this feature is quite", "Fancy release notes - tarehart and Skyborg \"\"\" } release_banner", "killing bots. \"\"\", '1.2.0': \"\"\" - We now offer a", "release! We actually left \"beta\" a long time ago so", "support for auto-running java bots during tournaments. To take advantage", "GUI will no longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\"", "in the GUI works better now. - Got rid of", "twice in a row. - Clicking on the \"Run\" button", "for GUI crash when saving preset then canceling - hallo-doei", "for dodge cancels / half flips! - ccman32 * ***************************************************", "up with a proper fix as soon as possible. You", "100. This means you can draw a lot of lines", "size for a render message has been decreased again because", "RLBot config files with relative paths for agents. Fixed agent", "':' not showing up in the GUI - hallo-doei -", "Boost pad data is missing - Loadout configuration is broken", "with relative paths for agents. Fixed agent preset loading to", "- Subprocess agent for future Rust support by whatisaphone \"\"\",", "for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei,", "# 3) we can import it into your module module", "mode. It was reporting 2 goals rather than the expected", "now it will be \"don't draw it\". - Showing the", "round up to the nearest integer, so 0.3% boost will", "'1.6.0':\"\"\" Add support for auto starting .NET executables. \"\"\", '1.5.1':", "As a side effect, you can no longer see up-to-date", "again. 
# Store the version here so:
# 1) we don't load dependencies by storing it in __init__.py
# 2) we can import it in setup.py for the same reason
# 3) we can import it into your module module
# https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package

__version__ = '1.6.1'

release_notes = {
    '1.6.1': """
    Fixed GUI crash when loading certain RLBot config files with relative paths for agents.
    Fixed agent preset loading to allow multiple agents to saved/loaded correctly
    if they have the same name. - ima9rd
    """,
    '1.6.0':"""
    Add support for auto starting .NET executables.
    """,
    '1.5.1': """
    Fixed crash with GUI when no default RLBot.cfg file was found.
    Updated GUI to launch Rocket League when clicking run if no Rocket League process is found. - ima9rd
    """,
    '1.5.0': """
    Adding a have_internet helper function to help streamline upgrade checks. - ima9rd
    """,
    '1.4.2': """
    Adding support for auto-running java bots during tournaments. To take advantage of this
    in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java
    Plus bug fixes:
    - Fixed a bug where auto-run executables would crash when trying to write to stderr.
    - Dragging bots to another team in the GUI no longer breaks the config.
    """,
    '1.3.0': """
    Accurate ball prediction for Hoops and Dropshot modes!
    - Kipje13, Marvin, NeverCast, et. al.
    """,
    '1.2.6': """
    Fixed a bug where field info was not extracted properly during dropshot mode.
    It was reporting 2 goals rather than the expected 140.
    """,
    '1.2.5': """
    ***************************************************
    * Fix for dodge cancels / half flips! - ccman32   *
    ***************************************************

    Plus:
    - Changing the rendering strategy for 3D lines that go past the camera. Formerly it was
      "draw it, even though it's crazy sometimes", now it will be "don't draw it".
    - Showing the rate that inputs are received for each player index when you press the
      [home] key. Toggle back off with the [end] key.
    - Fixed a bug where party_member_bot could get influenced by real controller input.
    - Creating new presets in the GUI works better now.
    - Got rid of the libpng warning seen when using the GUI.
    - Giving specific error messages when cfg files are messed up.
    """,
    '1.2.2': """
    - Rearranged the GUI a bit, and made it load and track appearance configs more effectively.
    - Fixed bug where RUN button behavior in the GUI would not work after killing bots.
    """,
    '1.2.0': """
    - We now offer a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation of
      physics data which updates at 120Hz and is not subject to interpolation. You can still make
      a great bot without it, but this feature is quite nice for the scientists among us.
      See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details!
    - Faster way to access ball prediction data in python. - Skyborg
    """,
    '1.1.3': """
    - Faster way to access ball prediction data in python. - Skyborg
    - Java bots will now shut down when the python framework quits. This has been necessary
      recently to avoid buggy situations.
    - Shutting down the python framework will no longer attempt to kill bots twice in a row.
    - Clicking on the "Run" button twice in a row in the GUI will no longer spawn duplicate processes.
    """,
    '1.1.2': """
    Faster way to access ball prediction data in python. - Skyborg
    """,
    '1.1.1': """
    You can now get information about the ball's status in Dropshot mode thanks to hallo_doei!
    Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot

    Other changes:
    - The loadout config for orange team is now respected again. - ccman32
    - Fixed a bug where the GUI would crash with a "KeyError". - hallo_doei
    - Avoiding and suppressing some game crashes, and also restoring the ability to get
      game tick data during replays and the postgame. - tarehart
    - Fixed a bug where bots would dodge when they intended to double jump. -tarehart
    """,
    '1.0.6': """
    The latest Rocket League patch broke dodges for our bots; this update fixes it.
    """,
    '1.0.5': """
    Maximum size for a render message has been decreased again because many people experienced
    errors related to memory access. The limit is now only double the original.
    """,
    '1.0.4': """
    - Maximum size for a render message has been increased by a factor of 100. This means
      you can draw a lot of lines at once without getting errors.
    - Boost amount for cars will now round up to the nearest integer, so 0.3% boost will
      now appear as 1 instead of 0.
    - Fixed a crash that would commonly happen after a match ends. As a side effect, you can
      no longer see up-to-date player data during instant replays.
    """,
    '1.0.3': """
    Time for the big 1.0 release! We actually left "beta" a long time ago so this isn't as big
    a milestone as the number implies, but we DO have two great new features!

    1. Setting game state. You can manipulate the position, velocity, etc of the ball and the cars!
    This can be a great help during bot development, and you can also get creative with it. Visit
    the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State
    Code written by hallo_doei, ccman32, and tarehart

    2. Ball prediction. We now provide a list of future ball positions based on chip's excellent
    physics modeling. Take advantage of this to do next-level wall reads, catches, and dribbles!
    You can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/
    Note: currently the wall bounces are only accurate on the standard arena, not hoops or dropshot.
    Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction
    Code written by chip and tarehart

    Bonus:
    - You can now play on Salty Shores thanks to hallo_doei
    - Bug fix for people with spaces in their file path by Zaptive
    - Subprocess agent for future Rust support by whatisaphone
    """,
    '0.0.32': """
    More comprehensive fix for Rocket League patch 1.50. Compared to previous version:
    - Dropshot tile data is fixed
    - Boost pad data is fixed
    - Loadout configuration is fixed

    Thanks to ccman32 and dtracers for delivering this fix quickly!
    """,
    '0.0.31': """
    Rapid response to Rocket League patch 1.50 with the following known issues:
    - Dropshot tile data is missing
    - Boost pad data is missing
    - Loadout configuration is broken

    Thanks to ccman32 and dtracers for delivering this short-term fix quickly.

    We will follow this up with a proper fix as soon as possible. You may also choose to stay on
    Rocket League 1.49 and RLBot 0.0.30, ask for instructions on discord.
    """,
    '0.0.30': """
    - New core dll that is less likely to break when Rocket League is patched - ccman32 and hallo-doei
    - Fixed bug resulting in incorrect quickchat - dtracers
    - Added more built-in colors to the python rendering manager - Eastvillage
    - Fix for items with a ':' not showing up in the GUI - hallo-doei
    - Fix for GUI not saving correct path - hallo-doei
    - Fix for GUI crash when saving preset then canceling - hallo-doei
    - Adding file checking before injection (Resolves #167) - Redox
    - Fixed typo in rlbot.cfg - Redox
    - Fancy release notes - tarehart and Skyborg
    """
}

release_banner = """
           ______ _     ______       _
     10100 | ___ \ |    | ___ \     | |              00101
    110011 | |_/ / |    | |_/ / ___ | |_            110011
  00110110 |    /| |    | ___ \/ _ \| __|         01101100
    010010 | |\ \| |____| |_/ / (_) | |_            010010
     10010 \_| \_\_____/\____/ \___/ \__|             01001

"""


def get_current_release_notes():
    if __version__ in release_notes:
        return release_notes[__version__]
    return ''


def get_help_text():
    return "Trouble? Ask on Discord at https://discord.gg/5cNbXgG " \
           "or report an issue at https://github.com/RLBot/RLBot/issues"


def print_current_release_notes():
    print(release_banner)
    print("Version {}".format(__version__))
    print(get_current_release_notes())
    print(get_help_text())
    print("")
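# A minimal usage sketch (added for illustration, not part of the original module):
# a launcher that wants to show the banner and the notes for the running version can
# simply call print_current_release_notes(); get_current_release_notes() already falls
# back to '' when __version__ has no entry in release_notes.
if __name__ == '__main__':
    print_current_release_notes()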
[ "for FullyConnectedHead Args: unique_id: A unique identifier for the head.", "size for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert", "typing import Any, Dict import torch.nn as nn from classy_vision.generic.util", "be attached to a model, and unique_id is used to", "identifier for the head. Multiple instances of the same head", "and its affiliates. # # This source code is licensed", "\"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config: A configuration", "bool = False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A", "in the # LICENSE file in the root directory of", "head. Multiple instances of the same head might be attached", "self.fc = None if num_classes is None else nn.Linear(in_plane, num_classes)", "the config. Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\",", "under the MIT license found in the # LICENSE file", "by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self,", "= None if num_classes is None else nn.Linear(in_plane, num_classes) if", ":func:`__init__` for parameters expected in the config. Returns: A FullyConnectedHead", "None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc", "config. Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None)", "-1) if self.fc is not None: out = self.fc(out) return", "file in the root directory of this source tree. from", "unique_id is used to refer to them. num_classes: Number of", "Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration.", "is not applied. in_plane: Input size for the fully connected", "Copyright (c) Facebook, Inc. and its affiliates. # # This", "#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates.", "to them. num_classes: Number of classes for the head. If", "LICENSE file in the root directory of this source tree.", "self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if self.fc", "\"\"\" def __init__( self, unique_id: str, num_classes: int, in_plane: int,", "the root directory of this source tree. from typing import", "out.reshape(out.size(0), -1) if self.fc is not None: out = self.fc(out)", "connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str, num_classes:", "the same head might be attached to a model, and", "as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead,", "\"\"\"This head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed", "its affiliates. # # This source code is licensed under", "assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if", "cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x):", "unique identifier for the head. Multiple instances of the same", "def forward(self, x): # perform average pooling: out = self.avgpool(x)", "connected layer. 
\"\"\" super().__init__(unique_id, num_classes) assert num_classes is None or", "= False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique", "import torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads", "might be attached to a model, and unique_id is used", "or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc =", "configuration for a FullyConnectedHead. See :func:`__init__` for parameters expected in", "(:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def", "for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert num_classes", "connected layer is not applied. in_plane: Input size for the", "out = self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1)", "Input size for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes)", "<gh_stars>1-10 #!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its", "False), ) def forward(self, x): # perform average pooling: out", "\"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique identifier for the", "forward(self, x): # perform average pooling: out = self.avgpool(x) #", "head might be attached to a model, and unique_id is", "# LICENSE file in the root directory of this source", "torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import", "num_classes) assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool", "is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1))", "followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__(", "\"\"\" num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls(", "average pooling: out = self.avgpool(x) # final classifier: out =", "the MIT license found in the # LICENSE file in", "-> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config:", "found in the # LICENSE file in the root directory", "in_plane: Input size for the fully connected layer. \"\"\" super().__init__(unique_id,", "FullyConnectedHead from a configuration. Args: config: A configuration for a", "= config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes,", "in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False),", "num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod", "license found in the # LICENSE file in the root", "this source tree. from typing import Any, Dict import torch.nn", "instances of the same head might be attached to a", "= config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), )", "= self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if", "for the head. If None, then the fully connected layer", "= nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is None", "layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\"", "a model, and unique_id is used to refer to them.", "directory of this source tree. from typing import Any, Dict", "from a configuration. 
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

from typing import Any, Dict

import torch.nn as nn
from classy_vision.generic.util import is_pos_int
from classy_vision.heads import ClassyHead, register_head


@register_head("fully_connected")
class FullyConnectedHead(ClassyHead):
    """This head defines a 2d average pooling layer
    (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected
    layer (:class:`torch.nn.Linear`).
    """

    def __init__(
        self,
        unique_id: str,
        num_classes: int,
        in_plane: int,
        zero_init_bias: bool = False,
    ):
        """Constructor for FullyConnectedHead

        Args:
            unique_id: A unique identifier for the head. Multiple instances of
                the same head might be attached to a model, and unique_id is
                used to refer to them.
            num_classes: Number of classes for the head. If None, then the
                fully connected layer is not applied.
            in_plane: Input size for the fully connected layer.
        """
        super().__init__(unique_id, num_classes)
        assert num_classes is None or is_pos_int(num_classes)
        assert is_pos_int(in_plane)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = None if num_classes is None else nn.Linear(in_plane, num_classes)
        if zero_init_bias:
            self.fc.bias.data.zero_()

    @classmethod
    def from_config(cls, config: Dict[str, Any]) -> "FullyConnectedHead":
        """Instantiates a FullyConnectedHead from a configuration.

        Args:
            config: A configuration for a FullyConnectedHead.
                See :func:`__init__` for parameters expected in the config.

        Returns:
            A FullyConnectedHead instance.
        """
        num_classes = config.get("num_classes", None)
        in_plane = config["in_plane"]
        return cls(
            config["unique_id"],
            num_classes,
            in_plane,
            zero_init_bias=config.get("zero_init_bias", False),
        )

    def forward(self, x):
        # perform average pooling:
        out = self.avgpool(x)

        # final classifier:
        out = out.reshape(out.size(0), -1)
        if self.fc is not None:
            out = self.fc(out)
        return out
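For orientation, a brief usage sketch of the head defined above, assuming it is available in the same module together with torch; the config values (a "default_head" id, 1000 classes, 2048 input planes) are illustrative placeholders, not taken from the original file.

import torch

config = {"unique_id": "default_head", "num_classes": 1000, "in_plane": 2048}  # illustrative values
head = FullyConnectedHead.from_config(config)

features = torch.randn(4, 2048, 7, 7)  # e.g. a batch of backbone feature maps
logits = head(features)                # average pool -> flatten -> linear, shape (4, 1000)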
[ "entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task", "import stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10)", "if __name__ == \"__main__\": print(\"*** Task 2C: CUED Part IA", "def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry", "\"__main__\": print(\"*** Task 2C: CUED Part IA Flood Warning System", "from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations", "print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C: CUED Part", "stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for", "= stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ ==", "floodsystem.stationdata import build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations", "= build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1])", "stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations:", "== \"__main__\": print(\"*** Task 2C: CUED Part IA Flood Warning", "build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if", "floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations =", "in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C:", "warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__", "print(\"*** Task 2C: CUED Part IA Flood Warning System ***\")", "Task 2C: CUED Part IA Flood Warning System ***\") run()", "from floodsystem.stationdata import build_station_list from floodsystem.flood import stations_highest_rel_level def run():", "run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in", "warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C: CUED", "build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list()", "for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"***", "import build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations =", "stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\":", "__name__ == \"__main__\": print(\"*** Task 2C: CUED Part IA Flood" ]
[ "`__copy_create__()` method. All variables, that could not be set via", "objects, that should be copyable. The public method `copy()` first", "constructor takes parameters. Returns ------- copy A freshly instantiated copy", "set via the constructor, are then copied via `__copy_fill__()`, starting", "All variables, that could not be set via the constructor,", "the `super()` method here. This method must be overridden, if", "and is distributed # under the 3-Clause BSD License. Please", "copy of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\"", "instance to be copied. This approach solves the problem of", "object of this class. Only the constructor should be called", "method `copy()` first creates a fresh instance of the class", "of this class. Only the constructor should be called in", "information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"]", "problem of encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\"", "A copy of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone)", "\"\"\" def copy(self): \"\"\" Create a deep copy of this", "method must be overridden, if the constructor takes parameters. Returns", "must be overridden, if the constructor takes parameters. Returns -------", "a new object of this class. Only the constructor should", "a fresh instance of the class of the instance, that", "creates a fresh instance of the class of the instance,", "to be copied are handled in `__copy_fill__()` Do not call", "Do not call the `super()` method here. This method must", "via the `__copy_create__()` method. All variables, that could not be", "copied are handled in `__copy_fill__()` Do not call the `super()`", "and ending with the class of the instance to be", "this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def", "\"\"\" Base class for all objects, that should be copyable.", "method. All variables, that could not be set via the", "the constructor, are then copied via `__copy_fill__()`, starting with the", "the `__copy_create__()` method. All variables, that could not be set", "solves the problem of encapsulated variables in superclasses. \"\"\" def", "A freshly instantiated copy of *self*. \"\"\" return type(self)() def", "base class and ending with the class of the instance", "__copy_fill__(self, clone): \"\"\" Copy all necessary attributes to the new", "instance of the class of the instance, that is copied", "deep copy of this object. Returns ------- copy A copy", "return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes", "not be set via the constructor, are then copied via", "if the constructor takes parameters. Returns ------- copy A freshly", "The public method `copy()` first creates a fresh instance of", "of the class of the instance, that is copied via", "then copied via `__copy_fill__()`, starting with the method in the", "class and ending with the class of the instance to", "are then copied via `__copy_fill__()`, starting with the method in", "method in the uppermost base class and ending with the", "instantiated copy of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone):", "Create a deep copy of this object. Returns ------- copy", "that is copied via the `__copy_create__()` method. All variables, that", "= \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc", "this method. 
All further attributes, that need to be copied", "of the Biotite package and is distributed # under the", "attributes to the new object. Always call the `super()` method", "with the method in the uppermost base class and ending", "the instance, that is copied via the `__copy_create__()` method. All", "are handled in `__copy_fill__()` Do not call the `super()` method", "distributed # under the 3-Clause BSD License. Please see 'LICENSE.rst'", "= self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a", "this object. Returns ------- copy A copy of this object.", "# This source code is part of the Biotite package", "first statement. Parameters ---------- clone The freshly instantiated copy of", "of the instance to be copied. This approach solves the", "encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\" Create a", "---------- clone The freshly instantiated copy of *self*. \"\"\" pass", "Base class for all objects, that should be copyable. The", "copy(self): \"\"\" Create a deep copy of this object. Returns", "`__copy_fill__()` Do not call the `super()` method here. This method", "parameters. Returns ------- copy A freshly instantiated copy of *self*.", "of the instance, that is copied via the `__copy_create__()` method.", "via `__copy_fill__()`, starting with the method in the uppermost base", "def copy(self): \"\"\" Create a deep copy of this object.", "clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate", "return clone def __copy_create__(self): \"\"\" Instantiate a new object of", "the 3-Clause BSD License. Please see 'LICENSE.rst' for further #", "to the new object. Always call the `super()` method as", "the new object. Always call the `super()` method as first", "= \"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\"", "new object. Always call the `super()` method as first statement.", "Instantiate a new object of this class. Only the constructor", "via the constructor, are then copied via `__copy_fill__()`, starting with", "uppermost base class and ending with the class of the", "Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that should be", "of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone", "__author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta):", "that could not be set via the constructor, are then", "attributes, that need to be copied are handled in `__copy_fill__()`", "of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy", "all objects, that should be copyable. The public method `copy()`", "the method in the uppermost base class and ending with", "the constructor should be called in this method. All further", "'LICENSE.rst' for further # information. __name__ = \"biotite\" __author__ =", "need to be copied are handled in `__copy_fill__()` Do not", "for further # information. __name__ = \"biotite\" __author__ = \"<NAME>\"", "*self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all", "= [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for", "the Biotite package and is distributed # under the 3-Clause", "the class of the instance, that is copied via the", "Returns ------- copy A copy of this object. \"\"\" clone", "Please see 'LICENSE.rst' for further # information. __name__ = \"biotite\"", "method as first statement. 
Parameters ---------- clone The freshly instantiated", "------- copy A copy of this object. \"\"\" clone =", "statement. Parameters ---------- clone The freshly instantiated copy of *self*.", "approach solves the problem of encapsulated variables in superclasses. \"\"\"", "`super()` method as first statement. Parameters ---------- clone The freshly", "class for all objects, that should be copyable. The public", "is copied via the `__copy_create__()` method. All variables, that could", "in the uppermost base class and ending with the class", "the instance to be copied. This approach solves the problem", "variables, that could not be set via the constructor, are", "overridden, if the constructor takes parameters. Returns ------- copy A", "further # information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__", "copied via `__copy_fill__()`, starting with the method in the uppermost", "for all objects, that should be copyable. The public method", "clone): \"\"\" Copy all necessary attributes to the new object.", "`super()` method here. This method must be overridden, if the", "# information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__ =", "copy of this object. Returns ------- copy A copy of", "further attributes, that need to be copied are handled in", "source code is part of the Biotite package and is", "be overridden, if the constructor takes parameters. Returns ------- copy", "all necessary attributes to the new object. Always call the", "`copy()` first creates a fresh instance of the class of", "object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self):", "Copy all necessary attributes to the new object. Always call", "This method must be overridden, if the constructor takes parameters.", "fresh instance of the class of the instance, that is", "class of the instance, that is copied via the `__copy_create__()`", "that need to be copied are handled in `__copy_fill__()` Do", "be copied. This approach solves the problem of encapsulated variables", "copy A freshly instantiated copy of *self*. \"\"\" return type(self)()", "Parameters ---------- clone The freshly instantiated copy of *self*. \"\"\"", "with the class of the instance to be copied. This", "call the `super()` method here. This method must be overridden,", "self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a new", "\"\"\" Copy all necessary attributes to the new object. Always", "License. Please see 'LICENSE.rst' for further # information. __name__ =", "[\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all", "copied via the `__copy_create__()` method. All variables, that could not", "be called in this method. All further attributes, that need", "__name__ = \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import", "the constructor takes parameters. Returns ------- copy A freshly instantiated", "is distributed # under the 3-Clause BSD License. Please see", "should be copyable. The public method `copy()` first creates a", "be set via the constructor, are then copied via `__copy_fill__()`,", "This source code is part of the Biotite package and", "copy A copy of this object. \"\"\" clone = self.__copy_create__()", "------- copy A freshly instantiated copy of *self*. \"\"\" return", "the problem of encapsulated variables in superclasses. \"\"\" def copy(self):", "\"\"\" Create a deep copy of this object. Returns -------", "should be called in this method. 
All further attributes, that", "Returns ------- copy A freshly instantiated copy of *self*. \"\"\"", "in this method. All further attributes, that need to be", "here. This method must be overridden, if the constructor takes", "under the 3-Clause BSD License. Please see 'LICENSE.rst' for further", "this class. Only the constructor should be called in this", "All further attributes, that need to be copied are handled", "method. All further attributes, that need to be copied are", "public method `copy()` first creates a fresh instance of the", "takes parameters. Returns ------- copy A freshly instantiated copy of", "ending with the class of the instance to be copied.", "copy of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return", "\"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc class", "to be copied. This approach solves the problem of encapsulated", "be copyable. The public method `copy()` first creates a fresh", "This approach solves the problem of encapsulated variables in superclasses.", "new object of this class. Only the constructor should be", "in `__copy_fill__()` Do not call the `super()` method here. This", "code is part of the Biotite package and is distributed", "that should be copyable. The public method `copy()` first creates", "as first statement. Parameters ---------- clone The freshly instantiated copy", "Biotite package and is distributed # under the 3-Clause BSD", "instance, that is copied via the `__copy_create__()` method. All variables,", "clone def __copy_create__(self): \"\"\" Instantiate a new object of this", "copyable. The public method `copy()` first creates a fresh instance", "called in this method. All further attributes, that need to", "be copied are handled in `__copy_fill__()` Do not call the", "Always call the `super()` method as first statement. Parameters ----------", "call the `super()` method as first statement. Parameters ---------- clone", "package and is distributed # under the 3-Clause BSD License.", "def __copy_create__(self): \"\"\" Instantiate a new object of this class.", "the class of the instance to be copied. This approach", "constructor should be called in this method. All further attributes,", "could not be set via the constructor, are then copied", "a deep copy of this object. Returns ------- copy A", "3-Clause BSD License. Please see 'LICENSE.rst' for further # information.", "class. Only the constructor should be called in this method.", "\"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary", "__all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class", "not call the `super()` method here. This method must be", "def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes to the", "class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that should", "copied. This approach solves the problem of encapsulated variables in", "variables in superclasses. \"\"\" def copy(self): \"\"\" Create a deep", "handled in `__copy_fill__()` Do not call the `super()` method here.", "first creates a fresh instance of the class of the", "<filename>src/biotite/copyable.py # This source code is part of the Biotite", "\"\"\" Instantiate a new object of this class. Only the", "see 'LICENSE.rst' for further # information. __name__ = \"biotite\" __author__", "is part of the Biotite package and is distributed #", "freshly instantiated copy of *self*. 
\"\"\" return type(self)() def __copy_fill__(self,", "the uppermost base class and ending with the class of", "\"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\"", "object. Returns ------- copy A copy of this object. \"\"\"", "__copy_create__(self): \"\"\" Instantiate a new object of this class. Only", "type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes to", "part of the Biotite package and is distributed # under", "of this object. Returns ------- copy A copy of this", "\"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base", "Only the constructor should be called in this method. All", "abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that", "necessary attributes to the new object. Always call the `super()`", "import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects,", "BSD License. Please see 'LICENSE.rst' for further # information. __name__", "method here. This method must be overridden, if the constructor", "`__copy_fill__()`, starting with the method in the uppermost base class", "# under the 3-Clause BSD License. Please see 'LICENSE.rst' for", "constructor, are then copied via `__copy_fill__()`, starting with the method", "starting with the method in the uppermost base class and", "in superclasses. \"\"\" def copy(self): \"\"\" Create a deep copy", "superclasses. \"\"\" def copy(self): \"\"\" Create a deep copy of", "the `super()` method as first statement. Parameters ---------- clone The", "of encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\" Create", "object. Always call the `super()` method as first statement. Parameters", "self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a new object", "class of the instance to be copied. This approach solves" ]
[ "import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import", "= latest_event.event_ts else: self._on = False self._last_event = latest_event.event_ts else:", "-> List[Device]: try: return client.get_cameras() except AccessTokenError as e: _LOGGER.warning(e)", "latest_event = self._client.get_latest_event(self._device) if latest_event is not None: if latest_event.event_ts", "}, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property def", "from wyzeapy.client import Client from wyzeapy.types import PropertyIDs from .const", "True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def __init__(self, wyzeapi_client:", "is_on(self): \"\"\"Return true if switch is on.\"\"\" return self._on @property", "from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import Client from", "\"model\": self._device.product_model } @property def available(self) -> bool: return self._available", "[WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class", "self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value", "provided by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant,", "from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data", "else: self._on = False self._last_event = latest_event.event_ts else: self._on =", "self._device.product_model, \"mac\": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def", "@property def is_on(self): \"\"\"Return true if switch is on.\"\"\" return", "DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\"", "homeassistant.core import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client", "by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry:", "ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError", "def __init__(self, wyzeapi_client: Client, device: Device): self._client = wyzeapi_client self._device", "False latest_event = self._client.get_latest_event(self._device) if latest_event is not None: if", "latest_event.event_ts > self._last_event: self._on = True self._last_event = latest_event.event_ts else:", "def update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info", "try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device)", "model\": self._device.product_model, \"mac\": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION", "device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except", "= True if value == \"1\" else False latest_event =", "is on.\"\"\" return self._on @property def unique_id(self): return \"{}-motion\".format(self._device.mac) @property", "homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import", "time from datetime import timedelta from typing import List from", "if latest_event.event_ts > self._last_event: 
self._on = True self._last_event = latest_event.event_ts", "device_info = self._client.get_info(self._device) for property_id, value in device_info: if property_id", "from homeassistant.core import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from", "e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for", "\"\"\"Return device attributes of the entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION,", "try: return client.get_cameras() except AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate() return", "switch is on.\"\"\" return self._on @property def unique_id(self): return \"{}-motion\".format(self._device.mac)", "import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided by", "if value == \"1\" else False latest_event = self._client.get_latest_event(self._device) if", "PropertyIDs from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION =", "{ (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model", "of this switch.\"\"\" return self._device.nickname @property def is_on(self): \"\"\"Return true", "device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for", "async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor", "import logging import time from datetime import timedelta from typing", "_LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for camera", "self._available = True if value == \"1\" else False latest_event", "else False latest_event = self._client.get_latest_event(self._device) if latest_event is not None:", "wyzeapi_client self._device = device self._last_event = int(str(int(time.time())) + \"000\") @property", "entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available, \"device", "return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available, \"device model\":", "DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION", "def is_on(self): \"\"\"Return true if switch is on.\"\"\" return self._on", "self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try:", "\"Data provided by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass:", "return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes of the", "await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool", "== \"1\" else False latest_event = self._client.get_latest_event(self._device) if latest_event is", "\"\"\"Return true if switch is on.\"\"\" return self._on @property def", "HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor component\"\"\")", ".const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided", "(DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model }", "latest_event is not None: if latest_event.event_ts > self._last_event: 
self._on =", "the display name of this switch.\"\"\" return self._device.nickname @property def", "self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id } @property def device_class(self):", "name(self): \"\"\"Return the display name of this switch.\"\"\" return self._device.nickname", "import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry from", "self._device.product_model } @property def available(self) -> bool: return self._available @property", "of the entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\":", "\"1\" else False latest_event = self._client.get_latest_event(self._device) if latest_event is not", "= [WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True)", "device attributes of the entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\":", "self._client.get_latest_event(self._device) if latest_event is not None: if latest_event.event_ts > self._last_event:", "def device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\":", "unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes of", "from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core", "return self._device.nickname @property def is_on(self): \"\"\"Return true if switch is", "AccessTokenError from wyzeapy.client import Client from wyzeapy.types import PropertyIDs from", "\"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes of the entity.\"\"\"", "} @property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info", "is not None: if latest_event.event_ts > self._last_event: self._on = True", "AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value in device_info:", "this switch.\"\"\" return self._device.nickname @property def is_on(self): \"\"\"Return true if", "None: if latest_event.event_ts > self._last_event: self._on = True self._last_event =", "WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def __init__(self, wyzeapi_client: Client, device:", "ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from", "return self._available @property def name(self): \"\"\"Return the display name of", "logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\" SCAN_INTERVAL = timedelta(seconds=10)", "Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return client.get_cameras()", "device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True if value", "self._last_event = latest_event.event_ts else: self._on = False self._last_event = latest_event.event_ts", "+ \"000\") @property def device_info(self): return { \"identifiers\": { (DOMAIN,", "@property def device_state_attributes(self): \"\"\"Return device attributes of the entity.\"\"\" return", "self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property", "async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor component\"\"\") client: Client =", "List from homeassistant.components.binary_sensor import ( BinarySensorEntity, 
DEVICE_CLASS_MOTION ) from homeassistant.config_entries", "bool: return self._available @property def name(self): \"\"\"Return the display name", "value in device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True", "True if value == \"1\" else False latest_event = self._client.get_latest_event(self._device)", "wyzeapy.client import Client from wyzeapy.types import PropertyIDs from .const import", "( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry from homeassistant.const", "_LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id]", "import time from datetime import timedelta from typing import List", "component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try:", "value == \"1\" else False latest_event = self._client.get_latest_event(self._device) if latest_event", "cameras = [WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras,", "logging import time from datetime import timedelta from typing import", "self._on = True self._last_event = latest_event.event_ts else: self._on = False", "async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi", "device_state_attributes(self): \"\"\"Return device attributes of the entity.\"\"\" return { ATTR_ATTRIBUTION:", "\"\"\"Return the display name of this switch.\"\"\" return self._device.nickname @property", "_LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\" SCAN_INTERVAL", "\"mac\": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def update(self):", "Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry,", "def get_cameras() -> List[Device]: try: return client.get_cameras() except AccessTokenError as", "_available: bool def __init__(self, wyzeapi_client: Client, device: Device): self._client =", "class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def __init__(self, wyzeapi_client: Client,", "wyzeapi_client: Client, device: Device): self._client = wyzeapi_client self._device = device", "typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION )", "\"device model\": self._device.product_model, \"mac\": self.unique_id } @property def device_class(self): return", "client.get_cameras() except AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras", "client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)]", "from datetime import timedelta from typing import List from homeassistant.components.binary_sensor", "\"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\":", "self.is_on, \"available\": self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id } @property", "= self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id,", "= True self._last_event = latest_event.event_ts else: self._on = False self._last_event", "= timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, 
async_add_entities): _LOGGER.debug(\"\"\"Creating", "import timedelta from typing import List from homeassistant.components.binary_sensor import (", "self._device = device self._last_event = int(str(int(time.time())) + \"000\") @property def", "def unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes", "import Client from wyzeapy.types import PropertyIDs from .const import DOMAIN", "= self._client.get_latest_event(self._device) if latest_event is not None: if latest_event.event_ts >", "return self._on @property def unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self):", "{ \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\",", "HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import Client", "List[Device]: try: return client.get_cameras() except AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate()", "= \"Data provided by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async def", "\"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property def available(self)", "\"WyzeLabs\", \"model\": self._device.product_model } @property def available(self) -> bool: return", "display name of this switch.\"\"\" return self._device.nickname @property def is_on(self):", "name of this switch.\"\"\" return self._device.nickname @property def is_on(self): \"\"\"Return", "attributes of the entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on,", "client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for camera in", "for property_id, value in device_info: if property_id == PropertyIDs.AVAILABLE: self._available", "if property_id == PropertyIDs.AVAILABLE: self._available = True if value ==", "@property def device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac) },", "wyzeapy.types import PropertyIDs from .const import DOMAIN _LOGGER = logging.getLogger(__name__)", "self._available @property def name(self): \"\"\"Return the display name of this", "new WyzeApi binary sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def", "@property def unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device", "except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value in", "on.\"\"\" return self._on @property def unique_id(self): return \"{}-motion\".format(self._device.mac) @property def", "sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]:", "return { \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\":", "= self._client.get_info(self._device) for property_id, value in device_info: if property_id ==", "def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device)", "ATTRIBUTION, \"state\": self.is_on, \"available\": self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id", "AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client,", "@property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info =", "_on: 
bool _available: bool def __init__(self, wyzeapi_client: Client, device: Device):", "int(str(int(time.time())) + \"000\") @property def device_info(self): return { \"identifiers\": {", "property_id == PropertyIDs.AVAILABLE: self._available = True if value == \"1\"", "BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import", "= logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\" SCAN_INTERVAL =", "Client, device: Device): self._client = wyzeapi_client self._device = device self._last_event", "camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool", "= device self._last_event = int(str(int(time.time())) + \"000\") @property def device_info(self):", "from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import", "update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info =", "> self._last_event: self._on = True self._last_event = latest_event.event_ts else: self._on", "self._on = False self._last_event = latest_event.event_ts else: self._on = False", "camera) for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity):", "def available(self) -> bool: return self._available @property def name(self): \"\"\"Return", "ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available, \"device model\": self._device.product_model, \"mac\":", "} @property def available(self) -> bool: return self._available @property def", "true if switch is on.\"\"\" return self._on @property def unique_id(self):", "async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def __init__(self,", "for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on:", "\"available\": self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id } @property def", "import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from", "self._on @property def unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return", "import Device, AccessTokenError from wyzeapy.client import Client from wyzeapy.types import", "import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant", "self._last_event = int(str(int(time.time())) + \"000\") @property def device_info(self): return {", "timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new", "not None: if latest_event.event_ts > self._last_event: self._on = True self._last_event", "Device, AccessTokenError from wyzeapy.client import Client from wyzeapy.types import PropertyIDs", "ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor component\"\"\") client: Client", "switch.\"\"\" return self._device.nickname @property def is_on(self): \"\"\"Return true if switch", "property_id, value in device_info: if property_id == PropertyIDs.AVAILABLE: self._available =", "import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from 
wyzeapy.base_client import Device,", "self._last_event: self._on = True self._last_event = latest_event.event_ts else: self._on =", "= hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return client.get_cameras() except", ") from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from", "import PropertyIDs from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION", "homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client import", "except AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras =", "wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import Client from wyzeapy.types", "== PropertyIDs.AVAILABLE: self._available = True if value == \"1\" else", "homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry", "SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities):", "@property def name(self): \"\"\"Return the display name of this switch.\"\"\"", "the entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available,", "if latest_event is not None: if latest_event.event_ts > self._last_event: self._on", "def name(self): \"\"\"Return the display name of this switch.\"\"\" return", "client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return", "device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name,", "True self._last_event = latest_event.event_ts else: self._on = False self._last_event =", "latest_event.event_ts else: self._on = False self._last_event = latest_event.event_ts else: self._on", "config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor component\"\"\") client:", "available(self) -> bool: return self._available @property def name(self): \"\"\"Return the", "self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property def available(self) ->", "Device): self._client = wyzeapi_client self._device = device self._last_event = int(str(int(time.time()))", "self._device.nickname @property def is_on(self): \"\"\"Return true if switch is on.\"\"\"", "-> bool: return self._available @property def name(self): \"\"\"Return the display", "from wyzeapy.types import PropertyIDs from .const import DOMAIN _LOGGER =", "@property def available(self) -> bool: return self._available @property def name(self):", "self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value in device_info: if", "timedelta from typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity,", "from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client", "return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError:", "in device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True if", "Client from wyzeapy.types import PropertyIDs from .const import DOMAIN _LOGGER", "device: Device): self._client = wyzeapi_client self._device = device self._last_event =", "def device_state_attributes(self): \"\"\"Return device attributes of the 
entity.\"\"\" return {", "if switch is on.\"\"\" return self._on @property def unique_id(self): return", "PropertyIDs.AVAILABLE: self._available = True if value == \"1\" else False", "binary sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() ->", "ATTRIBUTION = \"Data provided by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async", "def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary", "as e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client, camera)", "self._client = wyzeapi_client self._device = device self._last_event = int(str(int(time.time())) +", "bool _available: bool def __init__(self, wyzeapi_client: Client, device: Device): self._client", "\"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property def available(self) -> bool:", "return client.get_cameras() except AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras()", "datetime import timedelta from typing import List from homeassistant.components.binary_sensor import", "in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available:", "hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def", "\"state\": self.is_on, \"available\": self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id }", "= int(str(int(time.time())) + \"000\") @property def device_info(self): return { \"identifiers\":", "get_cameras() -> List[Device]: try: return client.get_cameras() except AccessTokenError as e:", "__init__(self, wyzeapi_client: Client, device: Device): self._client = wyzeapi_client self._device =", "self._client.get_info(self._device) for property_id, value in device_info: if property_id == PropertyIDs.AVAILABLE:", "from typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION", "\"000\") @property def device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac)", "return client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for camera in await", "{ ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available, \"device model\": self._device.product_model,", "= wyzeapi_client self._device = device self._last_event = int(str(int(time.time())) + \"000\")", "device self._last_event = int(str(int(time.time())) + \"000\") @property def device_info(self): return", "DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate()", "WyzeApi binary sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras()", "bool def __init__(self, wyzeapi_client: Client, device: Device): self._client = wyzeapi_client", "hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return client.get_cameras() except AccessTokenError" ]
[ "BRDF 16-day Level 3 products, MCD43C1 (0.05 degree horz res),", "horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for", "'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 =", "( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6',", "file # ----------------------------------------------- for name in self.SDS: self.__dict__[name] = []", "'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary',", "self.__dict__[name] = [] BRDF = MISSING * ones((len(self.SDS),self.nobs)) for fn", "from datetime import date, datetime, timedelta from glob import glob", "'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629',", "implement a single directory, or a list of files and", "on the grid (dx,dy) \"\"\" dLon = 0.05 dLat =", "= 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 =", "\"\"\"Reads MCD43C1 file with Level 3 BRDF kernels for each", "hfile = SD(fn) except HDF4Error: if self.verb > 2: print", "> 2: print \"- %s: not recognized as an HDF", "print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0", "directories. \"\"\" if type(lon) is list: lon = array(lon) lat", "of lat, lon, return numbers to find the position of", "numbers to find the position of the nearest neighbor on", "self._findNearest(Path,lon,lat) # Read BRDF kernel in a MODIS tile #", "def _findNearest(self,path,lon,lat): \"\"\"Given a list of lat, lon, return numbers", "Level 3 BRDF kernels for each MODIS band.\"\"\" # Create", "except HDF4Error: if self.verb > 2: print \"- %s: not", "and directories. 
\"\"\" if type(lon) is list: lon = array(lon)", "'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114',", "QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict", "BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6", "each MODIS band.\"\"\" # Create empty lists for SDS to", "BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................", "From a list of lat and lon, return the #", "kernels for each MODIS band.\"\"\" # Create empty lists for", "__name__ == \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat", "On input, Required parameters: Path -- for now a single", "'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629',", "= a['scale_factor'] * v + a['add_offset'] if self.verb: print array(self.dx),", "\"[] Working on \"+fn hfile = SD(fn) except HDF4Error: if", "dy on the grid # ------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat)", "= 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 =", "nearest neighbor on the grid (dx,dy) \"\"\" dLon = 0.05", "= array(lon) lat = array(lat) # List of HDF files", "file\"%filename return # Read select variables (reshape to allow concatenation", "= Verb self.SDS = SDS['LAND'] #self.Tfiles = glob(Path + '*.hdf')", "or a list of files and directories. \"\"\" if type(lon)", "SD(fn) except HDF4Error: if self.verb > 2: print \"- %s:", "from numpy import loadtxt, array, tile, where, concatenate, flipud from", "lists for SDS to be read from file # -----------------------------------------------", "BRDF kernels for each MODIS band.\"\"\" # Create empty lists", "'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465',", "= 'KGEO_b7_2114', ) #........................................................................... class McD43C(object): \"\"\" This class implements", "self.verb > 2: print \"- %s: not recognized as an", "self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1 file", "present on a given *Path* and returns an object with", "type(lon) is list: lon = array(lon) lat = array(lat) #", "(0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy #---", "sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ 
if __name__", "(dx,dy) \"\"\" dLon = 0.05 dLat = 0.05 Lon0 =", "variables (reshape to allow concatenation later) # ------------------------------------------------------------ for sds", "not recognized as an HDF file\"%filename return # Read select", "#--- def _findNearest(self,path,lon,lat): \"\"\"Given a list of lat, lon, return", "in self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a", "loadtxt, array, tile, where, concatenate, flipud from numpy import ones", "= glob(Path + '*.hdf') if type(Path) is str: self.Files =", "a['add_offset']!=0.0: v = a['scale_factor'] * v + a['add_offset'] if self.verb:", "( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465',", "with all 3 kernels coeff. On input, Required parameters: Path", "Create empty lists for SDS to be read from file", "flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for sds in self.SDS: self.__dict__[sds]", "sds in self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v = hfile.select(sds).get()", "McD43C(object): \"\"\" This class implements the MODIS LAND BRDF 16-day", "16-day Level 3 products, MCD43C1 (0.05 degree horz res), \"\"\"", "3 BRDF kernels for each MODIS band.\"\"\" # Create empty", "self.SDS: self.__dict__[name] = [] BRDF = MISSING * ones((len(self.SDS),self.nobs)) for", "BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4", "BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4", "3 kernels coeff. On input, Required parameters: Path -- for", "if type(lon) is list: lon = array(lon) lat = array(lat)", "if self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1", "(self,Path,lon,lat,Verb=1): \"\"\" Reads files for one day of Level 3", "files. \"\"\" import os import sys from numpy import loadtxt,", "# Create empty lists for SDS to be read from", "dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 =", "BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6", "'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #........................................................................... 
class McD43C(object): \"\"\" This", "# --------------------------------- self.read_BRDF() # Result #--- def _findNearest(self,path,lon,lat): \"\"\"Given a", "BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5", "band.\"\"\" # Create empty lists for SDS to be read", "timedelta from glob import glob from pyhdf.SD import SD, HDF4Error", "of lat and lon, return the # dx, dy on", "('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4',", "\"\"\" Reads climate modeling grid 0.05 degree MCD43 BRDF files.", "lat, lon, return numbers to find the position of the", "dx, dy on the grid # ------------------------------------- self.nobs = len(lon)", "parameters: Path -- for now a single file. Eventually implement", "Lat0 = -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy =", "be read from file # ----------------------------------------------- for name in self.SDS:", "datetime import date, datetime, timedelta from glob import glob from", "#----------------------------------- self.verb = Verb self.SDS = SDS['LAND'] #self.Tfiles = glob(Path", "directory, or a list of files and directories. \"\"\" if", "# From a list of lat and lon, return the", "BRDF kernel in a MODIS tile # --------------------------------- self.read_BRDF() #", "array(self.dx)] for sds in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds", "0.05 Lon0 = -180 - dLon Lat0 = -90 +", "'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo',", "= array(lat) # List of HDF files for a given", "where, concatenate, flipud from numpy import ones from datetime import", "is str: self.Files = [Path] else: self.Files = Path #", "\"\"\" import os import sys from numpy import loadtxt, array,", "for SDS to be read from file # ----------------------------------------------- for", "'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241',", "print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] =", "for a given date #----------------------------------- self.verb = Verb self.SDS =", "Path -- for now a single file. 
Eventually implement a", "+ dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb:", "on \"+fn hfile = SD(fn) except HDF4Error: if self.verb >", "'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6',", "This class implements the MODIS LAND BRDF 16-day Level 3", "to find the position of the nearest neighbor on the", "= [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat =", "np.arange(-180,180,1) lat = np.arange(-90,90,1) lon,lat = np.meshgrid(lon,lat) ex = McD43C(path,lon.flatten(),lat.flatte())", "_findNearest(self,path,lon,lat): \"\"\"Given a list of lat, lon, return numbers to", "ones from datetime import date, datetime, timedelta from glob import", "BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #........................................................................... class McD43C(object):", "= 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #........................................................................... class McD43C(object): \"\"\"", "= 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 =", "SDS to be read from file # ----------------------------------------------- for name", "-- for now a single file. Eventually implement a single", "lon = [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat", "MCD43C1 (0.05 degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\"", "= ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2',", "a given *Path* and returns an object with all 3", "%s: not recognized as an HDF file\"%filename return # Read", "in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]]", "a list of files and directories. \"\"\" if type(lon) is", "file. Eventually implement a single directory, or a list of", "try: if self.verb: print \"[] Working on \"+fn hfile =", "kernels coeff. 
On input, Required parameters: Path -- for now", "= 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 =", "given date #----------------------------------- self.verb = Verb self.SDS = SDS['LAND'] #self.Tfiles", "\"\"\" Reads files for one day of Level 3 MCD43C1", "lon = array(lon) lat = array(lat) # List of HDF", "files for one day of Level 3 MCD43C1 present on", "a list of lat and lon, return the # dx,", "a single file. Eventually implement a single directory, or a", "of Level 3 MCD43C1 present on a given *Path* and", "self.Files = [Path] else: self.Files = Path # From a", "= self.__dict__[sds] #--- #............................................................................ if __name__ == \"__main__\": path =", "BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6", "MCD43C1 present on a given *Path* and returns an object", "= 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 =", "LAND BRDF 16-day Level 3 products, MCD43C1 (0.05 degree horz", "*Path* and returns an object with all 3 kernels coeff.", "grid (dx,dy) \"\"\" dLon = 0.05 dLat = 0.05 Lon0", "# Read BRDF kernel in a MODIS tile # ---------------------------------", "if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ if", "[] BRDF = MISSING * ones((len(self.SDS),self.nobs)) for fn in self.Files:", "'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645',", "self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy',", "type(Path) is str: self.Files = [Path] else: self.Files = Path", "= (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self):", "'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality',", "and lon, return the # dx, dy on the grid", "self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3", "class McD43C(object): \"\"\" This class implements the MODIS LAND BRDF", "list of lat, lon, return numbers to find the position", "empty lists for SDS to be read from file #", "dLat = 0.05 Lon0 = -180 - dLon Lat0 =", "if self.verb: print \"[] Working on \"+fn hfile = SD(fn)", "MODIS LAND BRDF 16-day Level 3 products, MCD43C1 (0.05 degree", "'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS", "a single directory, or a list of files and directories.", "os import sys from numpy import loadtxt, array, tile, where,", "the grid # ------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat) # Read", "array(lon) lat = array(lat) # List of HDF files for", "lat = [88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat = 
np.arange(-90,90,1) lon,lat", "single file. Eventually implement a single directory, or a list", "and returns an object with all 3 kernels coeff. On", "#........................................................................... class McD43C(object): \"\"\" This class implements the MODIS LAND", "BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for", "find the position of the nearest neighbor on the grid", ") #........................................................................... class McD43C(object): \"\"\" This class implements the MODIS", "fn in self.Files: try: if self.verb: print \"[] Working on", "3 MCD43C1 present on a given *Path* and returns an", "return numbers to find the position of the nearest neighbor", "'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553',", "Read select variables (reshape to allow concatenation later) # ------------------------------------------------------------", "--------------------------------- self.read_BRDF() # Result #--- def _findNearest(self,path,lon,lat): \"\"\"Given a list", "'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level", "\"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5]", "flipud from numpy import ones from datetime import date, datetime,", "in self.SDS: self.__dict__[name] = [] BRDF = MISSING * ones((len(self.SDS),self.nobs))", "from numpy import ones from datetime import date, datetime, timedelta", "= Path # From a list of lat and lon,", "or a['add_offset']!=0.0: v = a['scale_factor'] * v + a['add_offset'] if", "a['scale_factor'] * v + a['add_offset'] if self.verb: print array(self.dx), BRDF.shape,", "BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7", "concatenate, flipud from numpy import ones from datetime import date,", "# ------------------------------------------------------------ for sds in self.SDS: if self.verb: print 'sds',self.SDS.index(sds)", "BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', )", "\"\"\" if type(lon) is list: lon = array(lon) lat =", "BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7", "#--- #............................................................................ if __name__ == \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon", "ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ 
if __name__ == \"__main__\":", "glob from pyhdf.SD import SD, HDF4Error MISSING = 32.767 SDS", "BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5", "(reshape to allow concatenation later) # ------------------------------------------------------------ for sds in", "('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1", "= 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 =", "BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2", "file with Level 3 BRDF kernels for each MODIS band.\"\"\"", "tile # --------------------------------- self.read_BRDF() # Result #--- def _findNearest(self,path,lon,lat): \"\"\"Given", "allow concatenation later) # ------------------------------------------------------------ for sds in self.SDS: if", "a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v + a['add_offset']", "if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v)", "MCD43 BRDF files. \"\"\" import os import sys from numpy", "array(lat) # List of HDF files for a given date", "print \"[] Working on \"+fn hfile = SD(fn) except HDF4Error:", "select variables (reshape to allow concatenation later) # ------------------------------------------------------------ for", "------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat) # Read BRDF kernel in", "HDF4Error MISSING = 32.767 SDS = dict ( LAND =", "'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) )", "[Path] else: self.Files = Path # From a list of", "modeling grid 0.05 degree MCD43 BRDF files. 
\"\"\" import os", "for sds in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in", "import date, datetime, timedelta from glob import glob from pyhdf.SD", "lat and lon, return the # dx, dy on the", "numpy import ones from datetime import date, datetime, timedelta from", "'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', )", "self.verb: print \"[] Working on \"+fn hfile = SD(fn) except", "# List of HDF files for a given date #-----------------------------------", "------------------------------------------------------------ for sds in self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v", "= 32.767 SDS = dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4',", "HDF files for a given date #----------------------------------- self.verb = Verb", "Working on \"+fn hfile = SD(fn) except HDF4Error: if self.verb", "Level 3 MCD43C1 present on a given *Path* and returns", "concatenation later) # ------------------------------------------------------------ for sds in self.SDS: if self.verb:", "'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645',", "dLon = 0.05 dLat = 0.05 Lon0 = -180 -", "self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy #--- def", "return the # dx, dy on the grid # -------------------------------------", "MODIS tile # --------------------------------- self.read_BRDF() # Result #--- def _findNearest(self,path,lon,lat):", "= 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 =", "with Level 3 BRDF kernels for each MODIS band.\"\"\" #", "list: lon = array(lon) lat = array(lat) # List of", "= 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 =", "'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241',", "HDF file\"%filename return # Read select variables (reshape to allow", "self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ 
if __name__ == \"__main__\": path", "self.nobs = len(lon) self._findNearest(Path,lon,lat) # Read BRDF kernel in a", "SD, HDF4Error MISSING = 32.767 SDS = dict ( LAND", "0.05 dLat = 0.05 Lon0 = -180 - dLon Lat0", "= 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 =", "'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL", "MODIS band.\"\"\" # Create empty lists for SDS to be", "== \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat =", "'KGEO_b7_2114', ) #........................................................................... class McD43C(object): \"\"\" This class implements the", "__init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one day of Level", "read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3 BRDF kernels for", "MISSING = 32.767 SDS = dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2',", "from pyhdf.SD import SD, HDF4Error MISSING = 32.767 SDS =", "v + a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape", "pyhdf.SD import SD, HDF4Error MISSING = 32.767 SDS = dict", "import glob from pyhdf.SD import SD, HDF4Error MISSING = 32.767", "return # Read select variables (reshape to allow concatenation later)", "<filename>src/Components/missions/GEMS/mcd43c.py \"\"\" Reads climate modeling grid 0.05 degree MCD43 BRDF", "BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7", "'sds',self.SDS.index(sds) v = hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or", "for one day of Level 3 MCD43C1 present on a", "= 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 =", "lon = np.arange(-180,180,1) lat = np.arange(-90,90,1) lon,lat = np.meshgrid(lon,lat) ex", "SDS['LAND'] #self.Tfiles = glob(Path + '*.hdf') if type(Path) is str:", "Eventually implement a single directory, or a list of files", "-180 - dLon Lat0 = -90 + dLat self.dx =", "grid # ------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat) # Read BRDF", "+ a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v", "= 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 =", "BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2", "files for a given date #----------------------------------- self.verb = Verb self.SDS", "v[array(self.dy), array(self.dx)] for sds in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if", "hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v =", "# Result #--- 
def _findNearest(self,path,lon,lat): \"\"\"Given a list of lat,", "coeff. On input, Required parameters: Path -- for now a", "if self.verb > 2: print \"- %s: not recognized as", "\"\"\" dLon = 0.05 dLat = 0.05 Lon0 = -180", "self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] =", "= BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #---", "= -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int)", "products, MCD43C1 (0.05 degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1):", "'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465',", "date #----------------------------------- self.verb = Verb self.SDS = SDS['LAND'] #self.Tfiles =", "BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1", "single directory, or a list of files and directories. \"\"\"", "= 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 =", "a list of lat, lon, return numbers to find the", "\"+fn hfile = SD(fn) except HDF4Error: if self.verb > 2:", "hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v", "SDS = dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7',", "'/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1)", "of HDF files for a given date #----------------------------------- self.verb =", "date, datetime, timedelta from glob import glob from pyhdf.SD import", "implements the MODIS LAND BRDF 16-day Level 3 products, MCD43C1", "= MISSING * ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if", "Reads climate modeling grid 0.05 degree MCD43 BRDF files. \"\"\"", "HDF4Error: if self.verb > 2: print \"- %s: not recognized", "an object with all 3 kernels coeff. On input, Required", "self.Files: try: if self.verb: print \"[] Working on \"+fn hfile", "import sys from numpy import loadtxt, array, tile, where, concatenate,", "the position of the nearest neighbor on the grid (dx,dy)", "# ------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat) # Read BRDF kernel", "BRDF = MISSING * ones((len(self.SDS),self.nobs)) for fn in self.Files: try:", "glob import glob from pyhdf.SD import SD, HDF4Error MISSING =", "the nearest neighbor on the grid (dx,dy) \"\"\" dLon =", "(0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads", "self.__dict__[sds] #--- #............................................................................ 
if __name__ == \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf'", "dLon Lat0 = -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy", "Reads files for one day of Level 3 MCD43C1 present", "glob(Path + '*.hdf') if type(Path) is str: self.Files = [Path]", "'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553',", "self.read_BRDF() # Result #--- def _findNearest(self,path,lon,lat): \"\"\"Given a list of", "= '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon =", "0.05 degree MCD43 BRDF files. \"\"\" import os import sys", "MCD43C1 file with Level 3 BRDF kernels for each MODIS", "'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629',", "def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one day of", "list of files and directories. \"\"\" if type(lon) is list:", "v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for sds", "* ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if self.verb: print", "recognized as an HDF file\"%filename return # Read select variables", "'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241',", "later) # ------------------------------------------------------------ for sds in self.SDS: if self.verb: print", "to allow concatenation later) # ------------------------------------------------------------ for sds in self.SDS:", "= v[array(self.dy), array(self.dx)] for sds in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:]", "BRDF files. \"\"\" import os import sys from numpy import", "BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4", "len(lon) self._findNearest(Path,lon,lat) # Read BRDF kernel in a MODIS tile", "= 0.05 dLat = 0.05 Lon0 = -180 - dLon", "= 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 =", "returns an object with all 3 kernels coeff. On input,", "self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a =", "str: self.Files = [Path] else: self.Files = Path # From", "dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4',", "for name in self.SDS: self.__dict__[name] = [] BRDF = MISSING", "as an HDF file\"%filename return # Read select variables (reshape", "\"- %s: not recognized as an HDF file\"%filename return #", "BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3", "of files and directories. \"\"\" if type(lon) is list: lon", "Read BRDF kernel in a MODIS tile # --------------------------------- self.read_BRDF()", "#............................................................................ 
if __name__ == \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon =", "in a MODIS tile # --------------------------------- self.read_BRDF() # Result #---", "# dx, dy on the grid # ------------------------------------- self.nobs =", "v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for sds in", "Required parameters: Path -- for now a single file. Eventually", "climate modeling grid 0.05 degree MCD43 BRDF files. \"\"\" import", "in self.Files: try: if self.verb: print \"[] Working on \"+fn", "day of Level 3 MCD43C1 present on a given *Path*", "name in self.SDS: self.__dict__[name] = [] BRDF = MISSING *", "on the grid # ------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat) #", "self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:]", "from file # ----------------------------------------------- for name in self.SDS: self.__dict__[name] =", "sys from numpy import loadtxt, array, tile, where, concatenate, flipud", "the MODIS LAND BRDF 16-day Level 3 products, MCD43C1 (0.05", "input, Required parameters: Path -- for now a single file.", "self.verb = Verb self.SDS = SDS['LAND'] #self.Tfiles = glob(Path +", "= SD(fn) except HDF4Error: if self.verb > 2: print \"-", "= hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v", "numpy import loadtxt, array, tile, where, concatenate, flipud from numpy", "----------------------------------------------- for name in self.SDS: self.__dict__[name] = [] BRDF =", "Result #--- def _findNearest(self,path,lon,lat): \"\"\"Given a list of lat, lon,", "an HDF file\"%filename return # Read select variables (reshape to", "BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #........................................................................... class McD43C(object): \"\"\" This class", "-90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if", "# ----------------------------------------------- for name in self.SDS: self.__dict__[name] = [] BRDF", "BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for sds in self.SDS: self.__dict__[sds] =", "list of lat and lon, return the # dx, dy", "of the nearest neighbor on the grid (dx,dy) \"\"\" dLon", "= hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] *", "= (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy", "'KISO_b4_553', BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114',", "read from file # ----------------------------------------------- for name in self.SDS: self.__dict__[name]", "self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds]", "Path # From a list of lat and lon, return", "object with all 3 kernels coeff. 
On input, Required parameters:", "#self.Tfiles = glob(Path + '*.hdf') if type(Path) is str: self.Files", ") ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2", "'BRDF_Albedo_Ancillary', ) ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645',", "class implements the MODIS LAND BRDF 16-day Level 3 products,", "= ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict (", "tile, where, concatenate, flipud from numpy import ones from datetime", "import ones from datetime import date, datetime, timedelta from glob", "\"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one day", "one day of Level 3 MCD43C1 present on a given", "neighbor on the grid (dx,dy) \"\"\" dLon = 0.05 dLat", "the # dx, dy on the grid # ------------------------------------- self.nobs", "'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL =", "def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3 BRDF kernels", "'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856',", "degree MCD43 BRDF files. \"\"\" import os import sys from", "res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one", "files and directories. \"\"\" if type(lon) is list: lon =", "a given date #----------------------------------- self.verb = Verb self.SDS = SDS['LAND']", "2: print \"- %s: not recognized as an HDF file\"%filename", "if __name__ == \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1]", "position of the nearest neighbor on the grid (dx,dy) \"\"\"", "a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v =", "for now a single file. Eventually implement a single directory,", "= 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #...........................................................................", "BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5", "= SDS['LAND'] #self.Tfiles = glob(Path + '*.hdf') if type(Path) is", "= dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2',", "\"\"\"Given a list of lat, lon, return numbers to find", "ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if self.verb: print \"[]", "import loadtxt, array, tile, where, concatenate, flipud from numpy import", "a MODIS tile # --------------------------------- self.read_BRDF() # Result #--- def", "for sds in self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v =", "grid 0.05 degree MCD43 BRDF files. 
\"\"\" import os import", "array, tile, where, concatenate, flipud from numpy import ones from", "all 3 kernels coeff. On input, Required parameters: Path --", "path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon", "'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856',", "lon, return the # dx, dy on the grid #", "\"\"\" This class implements the MODIS LAND BRDF 16-day Level", "= len(lon) self._findNearest(Path,lon,lat) # Read BRDF kernel in a MODIS", "[88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat = np.arange(-90,90,1) lon,lat = np.meshgrid(lon,lat)", "degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files", "= np.arange(-180,180,1) lat = np.arange(-90,90,1) lon,lat = np.meshgrid(lon,lat) ex =", "ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856',", "* v + a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:],", "#--- def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3 BRDF", "'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114',", "= [] BRDF = MISSING * ones((len(self.SDS),self.nobs)) for fn in", "self.verb: print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a = hfile.select(sds).attributes() if", "+ '*.hdf') if type(Path) is str: self.Files = [Path] else:", "= -180 - dLon Lat0 = -90 + dLat self.dx", "BRDF_Albedo_Parameter1_Band5 = 'KISO_b5_1241', BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1", "array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy),", "self.Files = Path # From a list of lat and", "List of HDF files for a given date #----------------------------------- self.verb", "lon, return numbers to find the position of the nearest", "'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #........................................................................... 
class", "v = hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0:", "if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v +", "'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS =", "to be read from file # ----------------------------------------------- for name in", "= 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 =", "self.SDS = SDS['LAND'] #self.Tfiles = glob(Path + '*.hdf') if type(Path)", "= [Path] else: self.Files = Path # From a list", "= 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 =", "if self.verb: print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a = hfile.select(sds).attributes()", "if type(Path) is str: self.Files = [Path] else: self.Files =", "3 products, MCD43C1 (0.05 degree horz res), \"\"\" def __init__", "- dLon Lat0 = -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int)", "LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7',", "Lon0 = -180 - dLon Lat0 = -90 + dLat", "= flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for sds in self.SDS:", "on a given *Path* and returns an object with all", "sds in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys():", "is list: lon = array(lon) lat = array(lat) # List", "'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'),", "v = a['scale_factor'] * v + a['add_offset'] if self.verb: print", "print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1 file with", ") ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 =", "= 0.05 Lon0 = -180 - dLon Lat0 = -90", "from glob import glob from pyhdf.SD import SD, HDF4Error MISSING", "BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3", "Verb self.SDS = SDS['LAND'] #self.Tfiles = glob(Path + '*.hdf') if", "BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)]", "in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ 
if __name__ ==", "(0.05 degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads", "dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print", "import os import sys from numpy import loadtxt, array, tile,", "MISSING * ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if self.verb:", "a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor']", "lat = array(lat) # List of HDF files for a", "# Read select variables (reshape to allow concatenation later) #", "= 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 =", "= 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5 = 'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 =", "for each MODIS band.\"\"\" # Create empty lists for SDS", "datetime, timedelta from glob import glob from pyhdf.SD import SD,", "32.767 SDS = dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6',", "[-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat = np.arange(-90,90,1)", "= 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 =", "given *Path* and returns an object with all 3 kernels", "else: self.Files = Path # From a list of lat", "Level 3 products, MCD43C1 (0.05 degree horz res), \"\"\" def", "= [88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat = np.arange(-90,90,1) lon,lat =", "'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553',", "kernel in a MODIS tile # --------------------------------- self.read_BRDF() # Result", "now a single file. Eventually implement a single directory, or", "the grid (dx,dy) \"\"\" dLon = 0.05 dLat = 0.05", "= dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3", "for fn in self.Files: try: if self.verb: print \"[] Working", "import SD, HDF4Error MISSING = 32.767 SDS = dict (", "'*.hdf') if type(Path) is str: self.Files = [Path] else: self.Files", "print \"- %s: not recognized as an HDF file\"%filename return" ]
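The heart of the reader is the grid lookup in _findNearest: observation longitudes and latitudes are converted to column/row indices on the 0.05 degree climate modeling grid, and each kernel SDS is then sampled at those indices after a north/south flip. Below is a minimal, self-contained sketch of that lookup; the helper name cmg_indices, the 3600 x 7200 array shape, and the synthetic kernel field are illustrative assumptions (no pyhdf or real MCD43C1 granule is needed), not part of the original module.

import numpy as np

def cmg_indices(lon, lat, dlon=0.05, dlat=0.05):
    """Nearest-cell indices on the 0.05 degree CMG, same arithmetic as _findNearest."""
    lon = np.asarray(lon, dtype=float)
    lat = np.asarray(lat, dtype=float)
    lon0 = -180.0 - dlon          # grid origin offsets used by the reader
    lat0 = -90.0 + dlat
    dx = (0.5 + (lon - lon0) / dlon).astype(int)
    dy = (0.5 + (lat - lat0) / dlat).astype(int)
    return dx, dy

# Synthetic global 0.05 degree field standing in for one BRDF kernel SDS
# (a real MCD43C1 SDS has 3600 rows by 7200 columns).
kernel = np.random.rand(3600, 7200)

dx, dy = cmg_indices([-2.0, -120.0, 15.2], [88.0, 40.0, -20.0])
samples = np.flipud(kernel)[dy, dx]   # flip to north-up, then pick (row, column)
print(samples)

Sampling np.flipud(kernel)[dy, dx] mirrors the v = flipud(v); BRDF[...] = v[array(self.dy), array(self.dx)] step in read_BRDF above.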
[ "x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x)", "mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input],", "need learning_phase to be set np.random.seed(1234) x = Input(shape=(3, 2))", "optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps", "compare to not using batch_input_shape test_input = np.random.randint(5, size=(10, 3,", "the shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True)))", "= layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse')", "3, 3)), epochs=1, batch_size=10) # test with BatchNormalization model =", "and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend')", "len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X)", "layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None))", "dim)] inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2),", "= wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model = Model([x, c],", "batch_size=1) # test config model.get_config() model = model_from_json(model.to_json()) model.summary() #", "8 assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) == 2 if", "y, epochs=1, batch_size=1) # test config model.get_config() model = model_from_json(model.to_json())", "model.fit(x, y, epochs=1, batch_size=1) # test config model.get_config() model =", "4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) # compare", "= y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:]", "model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) ==", "if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1],", "= layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac,", "6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3,", "epochs=1, batch_size=1) # Bidirectional and stateful inputs = Input(batch_shape=(1, timesteps,", "'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn", "1 assert len(model.losses) == 1 model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2,", "getattr(x, '_uses_learning_phase') for x in outputs) inputs = Input((timesteps, dim))", "model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # test flat list inputs", "Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output", "return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs))", "2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() == 
'cntk' or K.backend()", "f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected =", "kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) == 4 assert", "input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) == 1 def", "5)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic case", "RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second RNN layer", "to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)])", "layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X)", "with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model", "= {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) +", "3, 4), dtype='int32') for i in range(4): model_input[i, i:, i:]", "model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y,", "stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True),", "reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse')", "= np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32') for i in", "to not using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4),", "of a BiRNN is the concatenation of the underlying RNNs", "assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x)", "= wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward =", "2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner", "layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states", "TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK", "== 1 model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4)))", "yet') def test_TimeDistributed_trainable(): # test layers that need learning_phase to", "dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x,", "batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func =", "K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged", "= K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward = K.function([inputs],", "self.add_weight( shape=(self.units, self.units), 
import pytest
import numpy as np
import copy
from numpy.testing import assert_allclose

from keras.utils import CustomObjectScope
from keras.layers import wrappers, Input, Layer
from keras.layers import RNN
from keras import layers
from keras.models import Sequential, Model, model_from_json
from keras import backend as K
from keras.utils.generic_utils import object_list_uid, to_list


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support '
                           'TimeDistributed and RNN yet')
def test_TimeDistributed():
    # first, test with Dense layer
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)),
              epochs=1, batch_size=10)

    # test config
    model.get_config()

    # test when specifying a batch_input_shape
    test_input = np.random.random((1, 3, 4))
    test_output = model.predict(test_input)
    weights = model.layers[0].get_weights()

    reference = Sequential()
    reference.add(wrappers.TimeDistributed(layers.Dense(2),
                                           batch_input_shape=(1, 3, 4)))
    reference.add(layers.Activation('relu'))
    reference.compile(optimizer='rmsprop', loss='mse')
    reference.layers[0].set_weights(weights)
    reference_output = reference.predict(test_input)
    assert_allclose(test_output, reference_output, atol=1e-05)

    # test with Embedding
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Embedding(5, 6),
                                       batch_input_shape=(10, 3, 4),
                                       dtype='int32'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'),
              np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10)

    # compare to not using batch_input_shape
    test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32')
    test_output = model.predict(test_input)
    weights = model.layers[0].get_weights()

    reference = Sequential()
    reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6),
                                           input_shape=(3, 4), dtype='int32'))
    reference.compile(optimizer='rmsprop', loss='mse')
    reference.layers[0].set_weights(weights)
    reference_output = reference.predict(test_input)
    assert_allclose(test_output, reference_output, atol=1e-05)

    # test with Conv2D
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2),
                                                     padding='same'),
                                       input_shape=(2, 4, 4, 3)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(np.random.random((1, 2, 4, 4, 3)),
                         np.random.random((1, 2, 4, 4, 5)))

    model = model_from_json(model.to_json())
    model.summary()

    # test stacked layers
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))
    model.add(wrappers.TimeDistributed(layers.Dense(3)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)),
              epochs=1, batch_size=10)

    # test wrapping Sequential model
    model = Sequential()
    model.add(layers.Dense(3, input_dim=2))
    outer_model = Sequential()
    outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2)))
    outer_model.compile(optimizer='rmsprop', loss='mse')
    outer_model.fit(np.random.random((10, 3, 2)),
                    np.random.random((10, 3, 3)), epochs=1, batch_size=10)

    # test with functional API
    x = Input(shape=(3, 2))
    y = wrappers.TimeDistributed(model)(x)
    outer_model = Model(x, y)
    outer_model.compile(optimizer='rmsprop', loss='mse')
    outer_model.fit(np.random.random((10, 3, 2)),
                    np.random.random((10, 3, 3)), epochs=1, batch_size=10)

    # test with BatchNormalization
    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.BatchNormalization(center=True, scale=True),
        name='bn', input_shape=(10, 2)))
    model.compile(optimizer='rmsprop', loss='mse')
    # Assert that mean and variance are 0 and 1.
    td = model.layers[0]
    assert np.array_equal(td.get_weights()[2], np.array([0, 0]))
    assert np.array_equal(td.get_weights()[3], np.array([1, 1]))
    # (training-input distribution assumed from the upstream Keras test;
    # only the size/target arguments survived in the damaged source)
    model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)),
                         np.broadcast_to(np.array([0, 1]), (1, 10, 2)))
    # Assert that mean and variance changed.
    assert not np.array_equal(td.get_weights()[2], np.array([0, 0]))
    assert not np.array_equal(td.get_weights()[3], np.array([1, 1]))
    # Verify input_map has one mapping from inputs to reshaped inputs.
    uid = object_list_uid(model.inputs)
    assert len(td._input_map.keys()) == 1
    assert uid in td._input_map
    assert K.int_shape(td._input_map[uid]) == (None, 2)
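# TimeDistributed applies one copy of the wrapped layer independently to each
# temporal slice: a wrapped Dense(2) maps (batch, 3, 4) to (batch, 3, 2), as
# the tests above exercise. A minimal shape-contract sketch (illustrative,
# not part of the test suite; uses the same imports as this module):
#
#   x = Input(shape=(3, 4))
#   y = wrappers.TimeDistributed(layers.Dense(2))(x)
#   assert K.int_shape(y) == (None, 3, 2)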
"functional API x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model", "samples = 2 dim = 5 timesteps = 3 units", "y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) #", "'cntk'), reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase(): # test layers", "3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input,", "model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend", "[output] def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants,", "inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) == 4 assert len(layer.get_updates_for(None)) ==", "len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 layer.trainable = False", "== 4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2", "# Verify input_map has one mapping from inputs to reshaped", "0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has", "assert len(layer.updates) == 4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x))", "x in outputs) model = Model(inputs, outputs) assert model.uses_learning_phase y1", "== 'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps for RNN", "model = model_from_json(model.to_json()) model.summary() # test stacked bidirectional layers model", "2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1,", "np.random.random((1, 3, 4)) test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference", "= Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x, y)", "wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] # test passing invalid", "Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop',", "f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0],", "2 dropout_rate = 0.2 for mode in ['sum', 'concat']: x", "4)) test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential()", "[np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend()", "= 2 output_dim = 2 dropout_rate = 0.2 for mode", "def test_TimeDistributed_with_masking_layer(): # test with Masking layer model = Sequential()", "flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y =", "underlying RNNs y_merged = y_merged[-n_states * 2:] y_forward = y_forward[-n_states:]", "return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x in", "RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell))", "self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel')", "model.compile(optimizer='rmsprop', loss='mse') 
model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10, 3, 4,", "wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs],", "self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel')", "np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet", "y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) == len(y_expected) for x1,", "wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model = Model([x, c], y)", "mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0", "Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants':", "len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) == 2 if __name__ ==", "_ = layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights) ==", "dim = 5 timesteps = 3 units = 3 X", "== 0 layer.trainable = True assert len(layer.trainable_weights) == 6 def", "that need learning_phase to be set x = Input(shape=(3, 2))", "c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify that state is used", "wrappers, Input, Layer from keras.layers import RNN from keras import", "layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) ==", "K.dot(constant, self.constant_kernel) output = h_input + h_state + h_const return", "i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs", "RNNs y_merged = y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward", "config model.get_config() # test when specifying a batch_input_shape test_input =", "np.random.random((6, 32)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, s_fw_np,", "model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))],", "high=5, size=(10, 3, 4), dtype='int32') for i in range(4): model_input[i,", "constants h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const", "= lambda y, y_rev: (y + y_rev) / 2 elif", "Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates)", "= model_from_json(model.to_json()) model.summary() # test stacked bidirectional layers model =", "4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses)", "y_np = model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config()", "f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward =", "td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1,", "model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): #", "assert all(not getattr(x, '_uses_learning_phase') for x in outputs) inputs =", "x2, atol=1e-5) 
def test_Bidirectional_state_reuse(): rnn = layers.LSTM samples = 2", "inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs)", "reason='MXNet backend does not support TimeDistributed and RNN yet') def", "custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y", "0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0,", "Sequential, Model, model_from_json from keras import backend as K from", "model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3,", "loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10) #", "X = [np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps, dim)) wrapped", "merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward", "Model(x, y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y), 0., atol=1e-1,", "mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 =", "import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "2 assert len(layer.trainable_weights) == 2 layer.trainable = False assert len(layer.updates)", "and RNN yet') def test_TimeDistributed_trainable(): # test layers that need", "merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1,", "= func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1))", "assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers():", "'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else:", "outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3,", "= wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse',", "= 3 X = [np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps,", "wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not", "isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)]", "CustomObjectScope from keras.layers import wrappers, Input, Layer from keras.layers import", "wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights)", "input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0])", "model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, 
high=5, size=(10, 3, 4))", "timesteps, dim)] inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2,", "recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase')", "== 'mxnet'), reason='Unknown timestamps for RNN not supported in CNTK", "2 assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x = Input(shape=(3,", "merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1,", "np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test config model.get_config() #", "y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for", "layer([x, s_for, s_bac, c]) model = Model([x, s_for, s_bac, c],", "layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward", "timesteps, dim)) target_dim = 2 * output_dim if mode ==", "dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1,", "be set x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _", "= Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse',", "size=(10, 3, 4)) for i in range(4): model_input[i, i:, :]", "'mxnet'), reason='Unknown timestamps for RNN not supported in CNTK and", "# second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for", "assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed(", "epochs=1, batch_size=1) # test config model.get_config() model = model_from_json(model.to_json()) model.summary()", "inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs)", "# test with functional API with dynamic length rnn =", "= super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic", "c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)),", "len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1,", "5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic", "i:, :] = 0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)),", "y = layer(x, constants=c) model = Model([x, c], y) model.set_weights(weights)", "'mxnet', reason='MXNet backend does not support custom RNN cell yet')", "than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform',", "assert_allclose from keras.utils import CustomObjectScope from keras.layers import wrappers, Input,", "= wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert len(model.layers)", "= Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) #", "test wrapping Sequential model model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model", "batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn", "passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1,", "mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input,", "# Assert that mean and variance are 0 and 1.", "self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects", "x2, atol=1e-5) # test if the state of a BiRNN", "import RNN from keras import layers from keras.models import Sequential,", "model_from_json(model.to_json()) model.summary() # test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2),", "output_dim y = np.random.random((samples, target_dim)) # test with Sequential model", "dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does", "'theano' or K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat',", "assert_allclose(test_output, reference_output, atol=1e-05) # test with Embedding model = Sequential()", "True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3,", "y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers that need learning_phase", "model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(", "model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom", "return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x, y,", "= Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) assert_allclose(y_np,", "atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "merge_func = lambda y, y_rev: (y + y_rev) / 2", "and variance are 0 and 1. td = model.layers[0] assert", "variance are 0 and 1. 
def test_Bidirectional():
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))

        # test with Sequential model
        model = Sequential()
        model.add(wrappers.Bidirectional(
            rnn(output_dim, dropout=dropout_rate,
                recurrent_dropout=dropout_rate),
            merge_mode=mode, input_shape=(timesteps, dim)))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test config
        model.get_config()
        model = model_from_json(model.to_json())
        model.summary()

        # test stacked bidirectional layers
        model = Sequential()
        model.add(wrappers.Bidirectional(
            rnn(output_dim, return_sequences=True),
            merge_mode=mode, input_shape=(timesteps, dim)))
        model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test with functional API
        inputs = Input((timesteps, dim))
        outputs = wrappers.Bidirectional(
            rnn(output_dim, dropout=dropout_rate,
                recurrent_dropout=dropout_rate),
            merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # Bidirectional and stateful
        inputs = Input(batch_shape=(1, timesteps, dim))
        outputs = wrappers.Bidirectional(
            rnn(output_dim, stateful=True), merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)


@pytest.mark.skipif(K.backend() == 'cntk',
                    reason='Unknown timestamps not supported in CNTK.')
def test_Bidirectional_dynamic_timesteps():
    # test with functional API with dynamic length
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))
        inputs = Input((None, dim))
        outputs = wrappers.Bidirectional(
            rnn(output_dim, dropout=dropout_rate,
                recurrent_dropout=dropout_rate),
            merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)
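# Merge-mode semantics exercised below: 'sum', 'mul' and 'ave' combine the
# forward and backward outputs elementwise (output dim stays at units),
# 'concat' stacks them along the last axis (output dim becomes 2 * units),
# and None returns the two outputs as a list. That is why test_Bidirectional
# above uses target_dim = 2 * output_dim only for 'concat'.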
@pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])
def test_Bidirectional_merged_value(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    if merge_mode == 'sum':
        merge_func = lambda y, y_rev: y + y_rev
    elif merge_mode == 'mul':
        merge_func = lambda y, y_rev: y * y_rev
    elif merge_mode == 'ave':
        merge_func = lambda y, y_rev: (y + y_rev) / 2
    elif merge_mode == 'concat':
        merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)
    else:
        merge_func = lambda y, y_rev: [y, y_rev]

    # basic case
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_sequences=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], to_list(layer(inputs)))
    f_forward = K.function([inputs], [layer.forward_layer.call(inputs)])
    f_backward = K.function([inputs],
                            [K.reverse(layer.backward_layer.call(inputs), 1)])

    y_merged = f_merged(X)
    y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0]))
    assert len(y_merged) == len(y_expected)
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test return_state
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], layer(inputs))
    f_forward = K.function([inputs], layer.forward_layer.call(inputs))
    f_backward = K.function([inputs], layer.backward_layer.call(inputs))
    n_states = len(layer.layer.states)

    y_merged = f_merged(X)
    y_forward = f_forward(X)
    y_backward = f_backward(X)
    y_expected = to_list(merge_func(y_forward[0], y_backward[0]))
    assert len(y_merged) == len(y_expected) + n_states * 2
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test if the state of a BiRNN is the concatenation of the underlying RNNs
    y_merged = y_merged[-n_states * 2:]
    y_forward = y_forward[-n_states:]
    y_backward = y_backward[-n_states:]
    for state_birnn, state_inner in zip(y_merged, y_forward + y_backward):
        assert_allclose(state_birnn, state_inner, atol=1e-5)


@pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet',
                    reason='Not supported.')
@pytest.mark.parametrize('merge_mode', ['sum', 'concat', None])
def test_Bidirectional_dropout(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(
        rnn(units, dropout=0.2, recurrent_dropout=0.2),
        merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs, training=True))
    assert all(not getattr(x, '_uses_learning_phase') for x in outputs)

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(
        rnn(units, dropout=0.2, return_state=True),
        merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs))
    assert all(x._uses_learning_phase for x in outputs)

    model = Model(inputs, outputs)
    assert model.uses_learning_phase
    y1 = to_list(model.predict(X))
    y2 = to_list(model.predict(X))
    for x1, x2 in zip(y1, y2):
        assert_allclose(x1, x2, atol=1e-5)


def test_Bidirectional_state_reuse():
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3

    input1 = Input((timesteps, dim))
    layer = wrappers.Bidirectional(
        rnn(units, return_state=True, return_sequences=True))
    state = layer(input1)[1:]

    # test passing invalid initial_state: passing a tensor
    input2 = Input((timesteps, dim))
    with pytest.raises(ValueError):
        output = wrappers.Bidirectional(rnn(units))(input2,
                                                    initial_state=state[0])

    # test valid usage: passing a list
    output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state)
    model = Model([input1, input2], output)
    assert len(model.layers) == 4
    assert isinstance(model.layers[-1].input, list)
    inputs = [np.random.rand(samples, timesteps, dim),
              np.random.rand(samples, timesteps, dim)]
    outputs = model.predict(inputs)
class RNNCellWithConstants(Layer):
    # minimal cell taking a single constant tensor; shared by the two
    # constants tests below (the damaged source repeated the definition
    # inside each test)
    def __init__(self, units, **kwargs):
        self.units = units
        self.state_size = units
        super(RNNCellWithConstants, self).__init__(**kwargs)

    def build(self, input_shape):
        if not isinstance(input_shape, list):
            raise TypeError('expects constants shape')
        [input_shape, constant_shape] = input_shape
        # will (and should) raise if more than one constant passed
        self.input_kernel = self.add_weight(
            shape=(input_shape[-1], self.units),
            initializer='uniform',
            name='kernel')
        self.recurrent_kernel = self.add_weight(
            shape=(self.units, self.units),
            initializer='uniform',
            name='recurrent_kernel')
        self.constant_kernel = self.add_weight(
            shape=(constant_shape[-1], self.units),
            initializer='uniform',
            name='constant_kernel')
        self.built = True

    def call(self, inputs, states, constants):
        [prev_output] = states
        [constant] = constants
        h_input = K.dot(inputs, self.input_kernel)
        h_state = K.dot(prev_output, self.recurrent_kernel)
        h_const = K.dot(constant, self.constant_kernel)
        output = h_input + h_state + h_const
        return output, [output]

    def get_config(self):
        config = {'units': self.units}
        base_config = super(RNNCellWithConstants, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support '
                           'custom RNN cell yet')
def test_Bidirectional_with_constants():
    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, c])
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support '
                           'custom RNN cell yet')
def test_Bidirectional_with_constants_layer_passing_initial_state():
    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    s_for = Input((32,))
    s_bac = Input((32,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 32)),
         np.zeros((6, 32)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    s_fw_np = np.random.random((6, 32))
    s_bk_np = np.random.random((6, 32))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # verify that state is used
    y_np_2_different_s = model.predict([x_np, s_fw_np + 10.,
                                        s_bk_np + 10., c_np])
    with pytest.raises(AssertionError):
        assert_allclose(y_np, y_np_2_different_s, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, s_for, s_bac, c])
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)
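# The constants= keyword used above is the plain RNN calling convention;
# Bidirectional simply forwards it to both directions. A minimal sketch
# without Bidirectional (illustrative only, same assumptions as the tests):
#
#   x_in = Input((5, 5))
#   c_in = Input((3,))
#   out = RNN(RNNCellWithConstants(8))(x_in, constants=c_in)
#   m = Model([x_in, c_in], out)  # output shape: (None, 8)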
Masking", "np.broadcast_to(np.array([0, 1]), (1, 10, 2))) # Assert that mean and", "lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func = lambda", "initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel", "10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10, 2))) # Assert that", "4, 3)), np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json())", "with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c) model", "2 * output_dim if mode == 'concat' else output_dim y", "np.random.random((samples, target_dim)) inputs = Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,", "def test_Bidirectional_dynamic_timesteps(): # test with functional API with dynamic length", "input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape,", "backend does not support custom RNN cell yet') def test_Bidirectional_with_constants():", "timesteps = 3 units = 3 input1 = Input((timesteps, dim))", "dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs)))", "test layers that need learning_phase to be set x =", "= Input(shape=(3, 2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _", "timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() ==", "= np.random.random((samples, timesteps, dim)) target_dim = 2 * output_dim if", "(y + y_rev) / 2 elif merge_mode == 'concat': merge_func", "wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x,", "dim)) target_dim = 2 * output_dim if mode == 'concat'", "def test_TimeDistributed_learning_phase(): # test layers that need learning_phase to be", "model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10)", "wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True))", "specifying a batch_input_shape test_input = np.random.random((1, 3, 4)) test_output =", "from keras.models import Sequential, Model, model_from_json from keras import backend", "= Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input =", "X = [np.random.rand(samples, timesteps, dim)] if merge_mode == 'sum': merge_func", "np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with functional API", "np.random.random((6, 3)) y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights =", "be set x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _", ":] = 0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1,", "def test_Bidirectional(): rnn = layers.SimpleRNN samples = 2 dim =", "with functional API with dynamic length rnn = layers.SimpleRNN samples", "self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output", "= layer(input1)[1:] # test passing invalid initial_state: passing a tensor", "[model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs) mask_outputs_val", "Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum',", "np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary() #", "wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0 assert len(layer.get_updates_for(None)) == 0 assert", "2 elif merge_mode == 'concat': merge_func = lambda y, y_rev:", "mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet", "= f_forward(X) y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert", "assert len(y_merged) == len(y_expected) for x1, x2 in zip(y_merged, y_expected):", "h_const = K.dot(constant, self.constant_kernel) output = h_input + h_state +", "y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn = layers.LSTM samples", "= 3 units = 3 X = [np.random.rand(samples, timesteps, dim)]", "ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3): assert", "activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) == 1", "CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with functional API with dynamic", "= K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged = f_merged(X) y_forward", "outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test", "model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)), epochs=1,", "does not support custom RNN cell yet') def test_Bidirectional_with_constants(): class", "+ h_state + h_const return output, [output] def get_config(self): config", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10,", "i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs =", "test return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True),", "range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1,", "3 units = 3 input1 = Input((timesteps, dim)) layer =", "[np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units,", "= Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)),", "state of a BiRNN is the concatenation of the underlying", "c], y) 
model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))],", "h_const return output, [output] def get_config(self): config = {'units': self.units}", "super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic case.", "Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model =", "MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape and Embeddings", "list): raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape #", "def __init__(self, units, **kwargs): self.units = units self.state_size = units", "np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 64)) ) #", "model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)), epochs=1,", "assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() ==", "['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples =", "model = Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2", "x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test", "self.built = True def call(self, inputs, states, constants): [prev_output] =", "s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights)", "will (and should) raise if more than one constant passed", "== 'cntk'), reason='Unknown timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps():", "= True def call(self, inputs, states, constants): [prev_output] = states", "zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test return_state inputs =", "y_backward = y_backward[-n_states:] for state_birnn, state_inner in zip(y_merged, y_forward +", "* output_dim if mode == 'concat' else output_dim y =", "len(layer.updates) == 0 assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) ==", "input_map has one mapping from inputs to reshaped inputs. 
uid", "axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential()", "assert_allclose(x1, x2, atol=1e-5) # test if the state of a", "5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 64))", "model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32')", "test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output = model.predict(test_input)", "epochs=1, batch_size=10) # test config model.get_config() # test when specifying", "len(model.layers) == 4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps,", "== 'concat' else output_dim y = np.random.random((samples, target_dim)) # test", "np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs +=", "model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10)", "with unspecified shape and Embeddings with mask_zero model = Sequential()", "= 3 X = [np.random.rand(samples, timesteps, dim)] if merge_mode ==", "= lambda y, y_rev: [y, y_rev] # basic case inputs", "len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() == 'cntk'", "inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c])", "= layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x,", "so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False)))", "model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y", "False assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.trainable_weights)", "supported in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with", "model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1, 2, 4,", "'mul': merge_func = lambda y, y_rev: y * y_rev elif", "len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'),", "model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_3 =", "= [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3): assert np.array_equal(mask_outputs_val[i],", "['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM", "np.zeros((6, 64)) ) # Test basic case serialization. 
x_np =", "call(self, inputs, states, constants): [prev_output] = states [constant] = constants", "for x in outputs) model = Model(inputs, outputs) assert model.uses_learning_phase", "s_bk_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects):", "layer(input1)[1:] # test passing invalid initial_state: passing a tensor input2", "3 X = [np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps, dim))", "Model([input1, input2], output) assert len(model.layers) == 4 assert isinstance(model.layers[-1].input, list)", "len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer", "inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None)) ==", "= 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)]", "4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)),", "10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat", "model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1", "assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend", "== 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None)", "layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim),", "np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0,", "= Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged =", "= False assert len(layer.trainable_weights) == 0 layer.trainable = True assert", "test with Masking layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4)))", "layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) == 4", "lambda y, y_rev: (y + y_rev) / 2 elif merge_mode", "0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs", "test with Dense layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))", "3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'),", "= Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) #", "# Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1,", "3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with", "from keras import layers from keras.models import Sequential, Model, model_from_json", "CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c)", "np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic case serialization.", "model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test", "(and should) raise if more than one constant passed self.input_kernel", "y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def", "samples = 2 dim = 2 timesteps = 2 output_dim", "merge_func = lambda y, y_rev: y * y_rev elif merge_mode", "Input((5, 5)) c = Input((3,)) s_for = Input((32,)) s_bac =", "c]) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_3", "layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8", "assert mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet',", "4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples,", "2 layer.trainable = False assert len(layer.updates) == 0 assert len(layer.trainable_weights)", "Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0 assert", "True assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend()", "assert len(model.layers) == 4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples,", "layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) ==", "= len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X) y_backward =", "# first, test with Dense layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2),", "Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert", "model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10,", "= np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights()", "custom RNN cell yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self,", "= model_from_json(model.to_json()) model.summary() # test stacked layers model = Sequential()", "c 
= Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}", "from keras.layers import wrappers, Input, Layer from keras.layers import RNN", "dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05)", "a BiRNN is the concatenation of the underlying RNNs y_merged", "layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model = Model([x,", "test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'),", "'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples", "name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel =", "input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1)", "merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) #", "np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has one mapping from", "config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items())", "= Input((32,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with", "== 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN", "mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input > 0 # embedding", "s_for, s_bac, c]) model = Model([x, s_for, s_bac, c], y)", "wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing a list output", "Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights)", "K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output = h_input +", "f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged = f_merged(X)", "import wrappers, Input, Layer from keras.layers import RNN from keras", "initial_state=state) model = Model([input1, input2], output) assert len(model.layers) == 4", "shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform',", "== 1 def test_Bidirectional(): rnn = layers.SimpleRNN samples = 2", "model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])", "4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2 def", "Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)),", "atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom", "h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const =", "[K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) 
assert", "= model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4), dtype='int32'))", "weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer =", "layer ref_mask_val_1 = ref_mask_val_0 # first RNN layer ref_mask_val_2 =", "= Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model", "# test layers that need learning_phase to be set x", "not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with functional", "import copy from numpy.testing import assert_allclose from keras.utils import CustomObjectScope", "model model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim)))", "should) raise if more than one constant passed self.input_kernel =", "axis=-1) # second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2]", "2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) ==", "= [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func =", "func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input", "np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model", "lambda y, y_rev: y * y_rev elif merge_mode == 'ave':", "None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples = 2 dim", "and stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim,", "[np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6,", "_ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False", "x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model =", "c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # test flat list inputs with", "outputs = to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase') for x", "yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase():", "= np.random.random((6, 32)) s_bk_np = np.random.random((6, 32)) c_np = np.random.random((6,", "= K.dot(constant, self.constant_kernel) output = h_input + h_state + h_const", "for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1])", "self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not", "= model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify", "name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and", "6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input)", "= 2 dropout_rate = 0.2 for mode in ['sum', 'concat']:", "assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) == 1 model =", "raise if more than one constant passed self.input_kernel = self.add_weight(", "y_np_2 = 
model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # test flat", "inputs to reshaped inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) ==", "= wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] # test passing", "wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x,", "keras.utils import CustomObjectScope from keras.layers import wrappers, Input, Layer from", "dim = 2 timesteps = 2 output_dim = 2 dropout_rate", "batch_size=10) # test with functional API x = Input(shape=(3, 2))", "mean and variance are 0 and 1. td = model.layers[0]", "5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 64)) )", "test with BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn',", "s_bac, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np])", "shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform',", "# the shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7,", "2 for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5)", "= Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4, 4, 3)))", "3, 3)), epochs=1, batch_size=10) # test with functional API x", "units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list):", "== 'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)", "# test with BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True),", "model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the shape so far:", "assert len(layer.updates) == 0 assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x))", "does not support TimeDistributed and RNN yet') def test_TimeDistributed_trainable(): #", "loss='mse') assert len(model.losses) == 1 def test_Bidirectional(): rnn = layers.SimpleRNN", "has one mapping from inputs to reshaped inputs. 
uid =", "with mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None)))", "Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs],", "= Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs", "optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # Bidirectional and stateful inputs", "wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert len(model.layers) ==", "ref_mask_val_0 # first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) #", "model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_2 =", "y, y_rev: (y + y_rev) / 2 elif merge_mode ==", "is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend", "= model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6),", "model_input = np.random.randint(low=1, high=5, size=(10, 3, 4)) for i in", "import numpy as np import copy from numpy.testing import assert_allclose", "initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states, constants):", "RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for,", "@pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not supported in CNTK.') def", "assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1,", "one mapping from inputs to reshaped inputs. uid = object_list_uid(model.inputs)", "CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c) model =", "4, 6)), epochs=1, batch_size=10) # compare to not using batch_input_shape", "test with functional API x = Input(shape=(3, 2)) y =", "test config model.get_config() # test when specifying a batch_input_shape test_input", "state = layer(input1)[1:] # test passing invalid initial_state: passing a", "== 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None)", "== 6 assert len(layer.get_losses_for(x)) == 2 if __name__ == '__main__':", "Test basic case serialization. 
x_np = np.random.random((6, 5, 5)) c_np", "2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates) ==", "y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend()", "s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop',", "= units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape):", "6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse') model_input", "4)) for i in range(4): model_input[i, i:, :] = 0.", "model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not", "model_from_json from keras import backend as K from keras.utils.generic_utils import", "= Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np,", "reason='MXNet backend does not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend()", "class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units self.state_size", "loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)),", "3)), np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary()", "layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c) model = Model([x,", "are 0 and 1. td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0,", "None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples = 2 dim", "elif merge_mode == 'ave': merge_func = lambda y, y_rev: (y", "wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model = Model([x, c], y)", "Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() ==", "= RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer =", "x = Input((5, 5)) c = Input((3,)) s_for = Input((32,))", "def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples = 2 dim =", "outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X)) for", "y_rev: (y + y_rev) / 2 elif merge_mode == 'concat':", "2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y) y", "'sum': merge_func = lambda y, y_rev: y + y_rev elif", "c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np,", "# test with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10,", "== 'mxnet', reason='MXNet backend does not support custom RNN cell", "serialization. 
x_np = np.random.random((6, 5, 5)) s_fw_np = np.random.random((6, 32))", "outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul',", "dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x", "layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates) == 2", "len(y_expected) + n_states * 2 for x1, x2 in zip(y_merged,", "batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not supported in CNTK.')", "build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape')", "else: merge_func = lambda y, y_rev: [y, y_rev] # basic", "2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse')", "wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y) y = model.predict(np.random.random((10, 3,", "return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)])", "model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1, 2, 4, 4, 5)))", "np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output = model.predict(test_input) weights =", "dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] #", "c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np,", "for state_birnn, state_inner in zip(y_merged, y_forward + y_backward): assert_allclose(state_birnn, state_inner,", "assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x = Input(shape=(3, 2))", "True def call(self, inputs, states, constants): [prev_output] = states [constant]", "4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) == 1 def test_Bidirectional():", "= wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable", "changed. assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1,", "y_np_2, atol=1e-4) # verify that state is used y_np_2_different_s =", "epochs=1, batch_size=10) # test with BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed(", "assert len(y_merged) == len(y_expected) + n_states * 2 for x1,", "= np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np =", "lambda y, y_rev: [y, y_rev] # basic case inputs =", "= model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))", "constants shape') [input_shape, constant_shape] = input_shape # will (and should)", "= input_shape # will (and should) raise if more than", "= Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend()", "None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does", "mean and variance changed. 
assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert", "4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10,", "# test with Masking layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None,", "usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model =", "initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y)", "= wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y) y = model.predict(np.random.random((10,", "layer(x, constants=c) model = Model([x, c], y) model.set_weights(weights) y_np_2 =", "len(td._input_map.keys()) == 1 assert uid in td._input_map assert K.int_shape(td._input_map[uid]) ==", "and RNN yet') def test_TimeDistributed_with_masking_layer(): # test with Masking layer", "6, mask_zero=True), input_shape=(None, None))) # the shape so far: (N,", "self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight(", "1 assert uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2)", "second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i", "'concat' else output_dim y = np.random.random((samples, target_dim)) inputs = Input((None,", "assert len(layer.losses) == 4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x))", "= 0. model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6)", "test with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3,", "model.fit(x, y, epochs=1, batch_size=1) # Bidirectional and stateful inputs =", "wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs],", "epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not supported in", "constant_shape] = input_shape # will (and should) raise if more", "reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Conv2D model =", "3)), epochs=1, batch_size=10) # test with BatchNormalization model = Sequential()", "= wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3,", "layers that need learning_phase to be set np.random.seed(1234) x =", "= np.random.random((samples, target_dim)) # test with Sequential model model =", "ref_mask_val_0 = model_input > 0 # embedding layer ref_mask_val_1 =", "1. td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3],", "weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4)))", "assert len(td._input_map.keys()) == 1 assert uid in td._input_map assert K.int_shape(td._input_map[uid])", "return dict(list(base_config.items()) + list(config.items())) # Test basic case. 
x =", "y = layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop',", "= Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)),", "inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) ==", "= to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) == len(y_expected) for x1, x2", "= K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant,", "= np.any(ref_mask_val_1, axis=-1) # second RNN layer ref_mask_val = [ref_mask_val_0,", "atol=1e-05) # test with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6),", "model = Model([input1, input2], output) assert len(model.layers) == 4 assert", "s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify that state is", "test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape and Embeddings with mask_zero", "test stacked bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode,", "Masking layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop',", "1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]),", "= Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode',", "training=True) model = Model(x, y) y = model.predict(np.random.random((10, 3, 2)))", "mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim)) target_dim", "more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units),", "4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4,", "not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky", "assert_allclose(state_birnn, state_inner, atol=1e-5) @pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet',", "timesteps = 2 output_dim = 2 dropout_rate = 0.2 for", "Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X))", "assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers that need", "RNN yet') def test_TimeDistributed(): # first, test with Dense layer", "weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4),", "dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs))", "0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1,", "model = Model(x, y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y),", "len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) ==", "object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "final 
layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects):", "layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) == 4 assert len(layer.get_updates_for(None)) == 2", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the", "model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse',", "layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False assert len(layer.trainable_weights)", "y = layer([x, s_for, s_bac, c]) model = Model([x, s_for,", "mapping from inputs to reshaped inputs. uid = object_list_uid(model.inputs) assert", "in range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)),", "@pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and", "4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)),", "to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1, x2 in zip(y1, y2):", "= self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units,", "constants): [prev_output] = states [constant] = constants h_input = K.dot(inputs,", "= Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse')", "timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with", "Test basic case. 
x = Input((5, 5)) c = Input((3,))", "model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() ==", "== 'theano' or K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum',", "uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert uid in", "def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert", "ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second RNN layer ref_mask_val =", "2 dim = 2 timesteps = 2 output_dim = 2", "for x in outputs) inputs = Input((timesteps, dim)) wrapped =", "a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2],", "c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4)", "test config model.get_config() model = model_from_json(model.to_json()) model.summary() # test stacked", "wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)),", "model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses)", "s_for, s_bac, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, s_fw_np, s_bk_np,", "== 4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim),", "[constant] = constants h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output,", "y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3,", "= np.random.random((6, 32)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np,", "and variance changed. 
assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not", "0 # embedding layer ref_mask_val_1 = ref_mask_val_0 # first RNN", "<filename>tests/keras/layers/wrappers_test.py<gh_stars>100-1000 import pytest import numpy as np import copy from", "# test if the state of a BiRNN is the", "constants=c) model = Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np,", "y + y_rev elif merge_mode == 'mul': merge_func = lambda", "# embedding layer ref_mask_val_1 = ref_mask_val_0 # first RNN layer", "keras import layers from keras.models import Sequential, Model, model_from_json from", "the state of a BiRNN is the concatenation of the", "timestamps for RNN not supported in CNTK and MXNet.') def", "t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse')", "x in outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units,", "Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4,", "len(layer.updates) == 0 assert len(layer.trainable_weights) == 0 layer.trainable = True", "mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func", "Model([x, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3,", "RNN from keras import layers from keras.models import Sequential, Model,", "assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects):", "reason='Unknown timestamps for RNN not supported in CNTK and MXNet.')", "model = Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np])", "= 0.2 for mode in ['sum', 'concat']: x = np.random.random((samples,", "= [np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps, dim)) wrapped =", "0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "assert all(x._uses_learning_phase for x in outputs) model = Model(inputs, outputs)", "model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse') model_input =", "inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert", "== len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2,", "with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4),", "= Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0", "4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3,", "list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x,", "dropout_rate = 0.2 for mode in ['sum', 'concat']: x =", "1])) # Verify input_map has one mapping from inputs to", "s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6, 32)) c_np 
=", "assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x)", "test when specifying a batch_input_shape test_input = np.random.random((1, 3, 4))", "func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def", "# test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))", "to be set np.random.seed(1234) x = Input(shape=(3, 2)) y =", "# test wrapping Sequential model model = Sequential() model.add(layers.Dense(3, input_dim=2))", "layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for,", "dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs,", "keras import backend as K from keras.utils.generic_utils import object_list_uid, to_list", "support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with", "y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop',", "= layer([x, s_for, s_bac, c]) model = Model([x, s_for, s_bac,", "3, 4)) test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference =", "assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.trainable_weights) ==", "f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) == len(y_expected) for", "= model_input > 0 # embedding layer ref_mask_val_1 = ref_mask_val_0", "x = Input((5, 5)) c = Input((3,)) cell = RNNCellWithConstants(32)", "= Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse')", "Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd')", "assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does", "input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test", "from keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend", "state_inner, atol=1e-5) @pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet', reason='Not", "epochs=1, batch_size=10) # compare to not using batch_input_shape test_input =", "wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) ==", "x1, x2 in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse():", "model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5,", "outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "y_rev) / 2 elif merge_mode == 'concat': merge_func = lambda", "y) 
outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1,", "= to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x in outputs) model =", "'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples = 2", "epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode):", "API inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate),", "or K.backend() == 'mxnet'), reason='Unknown timestamps for RNN not supported", "wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable =", "y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y) y =", "f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward =", "TimeDistributed and RNN yet') def test_TimeDistributed(): # first, test with", "= model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable():", "= h_input + h_state + h_const return output, [output] def", "model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights = model.get_weights() config = layer.get_config()", "to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase') for x in outputs)", "assert uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend()", "units, **kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs)", "for mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim))", "+ 10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s,", "@pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps for", "model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # Bidirectional and stateful", "# test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2),", "assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 layer.trainable =", "[np.random.rand(samples, timesteps, dim)] if merge_mode == 'sum': merge_func = lambda", "def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config()", "model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) ) #", "# will (and should) raise if more than one constant", "set x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ =", "loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) #", "test_Bidirectional_state_reuse(): rnn = layers.LSTM samples = 2 dim = 5", "model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown", "Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') #", "10, 2))) # Assert that mean and variance changed. 
assert", "Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),", "# verify that state is used y_np_2_different_s = model.predict([x_np, s_fw_np", "numpy.testing import assert_allclose from keras.utils import CustomObjectScope from keras.layers import", "+ h_const return output, [output] def get_config(self): config = {'units':", "= Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse')", "s_for = Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32) custom_objects", "test_TimeDistributed(): # first, test with Dense layer model = Sequential()", "y, y_rev: [y, y_rev] # basic case inputs = Input((timesteps,", "using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output", "3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config =", "def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4)))", "assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) == 2 if __name__", "# test with unspecified shape and Embeddings with mask_zero model", "with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2,", "5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])]", "K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def", "output, [output] def get_config(self): config = {'units': self.units} base_config =", "in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet',", "mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) #", "self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states,", "'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet')", "2 timesteps = 2 output_dim = 2 dropout_rate = 0.2", "TypeError('expects constants shape') [input_shape, constant_shape] = input_shape # will (and", "* y_rev elif merge_mode == 'ave': merge_func = lambda y,", "= self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1],", "y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner in", "y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def", "for RNN not supported in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape():", "zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test if the state", "1 def test_Bidirectional(): rnn = layers.SimpleRNN samples = 2 dim", "np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2,", "= K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states)", "constants=c) model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', 
loss='mse')", "to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed", "h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output =", "0 assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0,", "= f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X) y_expected =", "basic case. x = Input((5, 5)) c = Input((3,)) s_for", "test with Sequential model model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate),", "test valid usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state)", "= model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config() with", "len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) == 1 model = Sequential()", "[layer.forward_layer.call(inputs)]) f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected", "'concat']: x = np.random.random((samples, timesteps, dim)) target_dim = 2 *", "(2, 2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse')", "np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model =", "self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight(", "merge_mode == 'ave': merge_func = lambda y, y_rev: (y +", "pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing", "s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers", "model_input[i, i:, :] = 0. model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3,", "range(4): model_input[i, i:, :] = 0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10,", "0.2 for mode in ['sum', 'concat']: x = np.random.random((samples, timesteps,", "= model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu'))", "reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output =", "y, y_rev: y * y_rev elif merge_mode == 'ave': merge_func", "3 input1 = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True))", "# test config model.get_config() model = model_from_json(model.to_json()) model.summary() # test", "to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) == len(y_expected) for x1, x2 in", "model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1, x2", "def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape and Embeddings with", "layer([x, c]) model = Model([x, c], y) model.set_weights(weights) y_np_3 =", "Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5,", "kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1", "np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify", "32)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic case", "model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop',", "outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3,", "len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) ==", "target_dim)) # test with Sequential model model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim,", "keras.layers import wrappers, Input, Layer from keras.layers import RNN from", "x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) ==", "== 0 layer.trainable = True assert len(layer.updates) == 2 assert", "model model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model,", "[y, y_rev] # basic case inputs = Input((timesteps, dim)) layer", "np.random.random((samples, timesteps, dim)) target_dim = 2 * output_dim if mode", "== 2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'), reason='Unknown", "c = Input((3,)) s_for = Input((32,)) s_bac = Input((32,)) cell", "== 2 assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x =", "batch_size=1) # test with functional API inputs = Input((timesteps, dim))", "outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs)", 
"optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test with functional API", "= Input((5, 5)) c = Input((3,)) cell = RNNCellWithConstants(32) custom_objects", "support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def", "(N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False))", "layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert", "not using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32')", "assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1]))", "stacked bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps,", "len(y_merged) == len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1,", "set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True)", "len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1,", "inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert uid", "not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) #", "first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second RNN", "not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] =", "basic case. 
x = Input((5, 5)) c = Input((3,)) cell", "2, 4, 4, 3)), np.random.random((1, 2, 4, 4, 5))) model", "Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32'))", "reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse')", "2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps", "3, 2)), epochs=1, batch_size=10) # test config model.get_config() # test", "test_Bidirectional(): rnn = layers.SimpleRNN samples = 2 dim = 2", "2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x)", "backend does not support TimeDistributed and RNN yet') def test_TimeDistributed_with_masking_layer():", "dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential()", "0 layer.trainable = True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates():", "6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3))", "import pytest import numpy as np import copy from numpy.testing", "Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1,", "4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0,", "functional API inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,", "+ y_rev) / 2 elif merge_mode == 'concat': merge_func =", "a batch_input_shape test_input = np.random.random((1, 3, 4)) test_output = model.predict(test_input)", "= 2 dim = 2 timesteps = 2 output_dim =", "= Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights)", "size=(10, 3, 4), dtype='int32') for i in range(4): model_input[i, i:,", "False assert len(layer.updates) == 0 assert len(layer.trainable_weights) == 0 layer.trainable", "@pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase(): #", "atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects): layer =", "inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, s_for,", "model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1,", "+= [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs) mask_outputs_val = func([model_input])", "== 'sum': merge_func = lambda y, y_rev: y + y_rev", "test passing invalid initial_state: passing a tensor input2 = Input((timesteps,", "s_fw_np + 10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np,", "does not support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class", "# test config model.get_config() # test when specifying a batch_input_shape", "3, 2))) assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 
'mxnet', reason='MXNet", "0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for", "== (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "3, 4), dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference", "reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05)", "def call(self, inputs, states, constants): [prev_output] = states [constant] =", "# test flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))", "== len(y_expected) + n_states * 2 for x1, x2 in", "from inputs to reshaped inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys())", "batch_size=10) # test wrapping Sequential model model = Sequential() model.add(layers.Dense(3,", "merge_mode == 'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev),", "RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c)", "= layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2", "== 0 assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0", "assert_allclose(y_np, y_np_2, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects):", "0 layer.trainable = True assert len(layer.updates) == 2 assert len(layer.trainable_weights)", "input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10,", "len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates) == 2", "layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None)) == 6", "self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self,", "atol=1e-4) # verify that state is used y_np_2_different_s = model.predict([x_np,", "s_bac, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np])", "= Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model", "4, 4, 5))) model = model_from_json(model.to_json()) model.summary() # test stacked", ") # Test basic case serialization. 
x_np = np.random.random((6, 5,", "passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel =", "model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test", "model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3,", "and RNN yet') def test_TimeDistributed(): # first, test with Dense", "4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0,", "mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input > 0 #", "Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3,", "batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1]))", "model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for layer", "np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic", "a tensor input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output =", "layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0 assert len(layer.get_updates_for(None)) ==", "test if the state of a BiRNN is the concatenation", "backend does not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() ==", "layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model = Model([x,", "return output, [output] def get_config(self): config = {'units': self.units} base_config", "'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps for RNN not", "mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) inputs", "def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units =", "model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3,", "if mode == 'concat' else output_dim y = np.random.random((samples, target_dim))", "Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects):", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse')", "== 0 assert len(layer.trainable_weights) == 0 layer.trainable = True assert", "= Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3,", "= [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs)", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4, 4,", "= units 
super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape,", "y_rev: y * y_rev elif merge_mode == 'ave': merge_func =", "timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs,", "if the state of a BiRNN is the concatenation of", "= lambda y, y_rev: y + y_rev elif merge_mode ==", "x2, atol=1e-5) # test return_state inputs = Input((timesteps, dim)) layer", "CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape", "in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val =", "= lambda y, y_rev: y * y_rev elif merge_mode ==", "f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states =", "np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2,", "inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) ==", "Bidirectional and stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs =", "RNN cell yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self, units,", "== 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None)", "+ list(config.items())) # Test basic case. x = Input((5, 5))", "c_np = np.random.random((6, 3)) y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np])", "= lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func =", "np.random.random((samples, target_dim)) # test with Sequential model model = Sequential()", "assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x)", "timesteps = 3 units = 3 X = [np.random.rand(samples, timesteps,", "assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2))", "atol=1e-4) def test_Bidirectional_trainable(): # test layers that need learning_phase to", "not support TimeDistributed and RNN yet') def test_TimeDistributed(): # first,", "2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary() # test", "np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np,", "loss='mse') # Assert that mean and variance are 0 and", "len(layer.losses) == 8 assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) ==", "= wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing a list", "Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs],", "uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() ==", "CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac], constants=c)", "= Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd')", "def test_Bidirectional_losses(): x 
= Input(shape=(3, 2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3,", "# Test basic case serialization. x_np = np.random.random((6, 5, 5))", "np.random.randint(low=1, high=5, size=(10, 3, 4)) for i in range(4): model_input[i,", "RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in", "bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim)))", "Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs =", "= [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs)", "64)) ) # Test basic case serialization. x_np = np.random.random((6,", "_ = layer(x) assert len(layer.losses) == 4 assert len(layer.get_losses_for(None)) ==", "the underlying RNNs y_merged = y_merged[-n_states * 2:] y_forward =", "def test_Bidirectional_trainable(): # test layers that need learning_phase to be", "layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse')", "not support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer):", "Input((3,)) s_for = Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32)", "test with functional API inputs = Input((timesteps, dim)) outputs =", "inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged", "# Assert that mean and variance changed. assert not np.array_equal(td.get_weights()[2],", "= to_list(model.predict(X)) for x1, x2 in zip(y1, y2): assert_allclose(x1, x2,", "test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates)", "layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] # test", "loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32') for", "y_rev] # basic case inputs = Input((timesteps, dim)) layer =", "np.random.random((6, 5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6,", "f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged) == len(y_expected) +", "units = 3 X = [np.random.rand(samples, timesteps, dim)] if merge_mode", "1]), (1, 10, 2))) # Assert that mean and variance", "# test layers that need learning_phase to be set np.random.seed(1234)", "4), dtype='int32') for i in range(4): model_input[i, i:, i:] =", "# test with functional API inputs = Input((timesteps, dim)) outputs", "CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model =", "'cntk'), reason='Unknown timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): #", "layer.trainable = False assert len(layer.trainable_weights) == 0 layer.trainable = True", "== 1 assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) == 1", "= wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward =", "6 layer.trainable = False assert len(layer.trainable_weights) == 0 layer.trainable =", "assert not np.array_equal(td.get_weights()[3], 
np.array([1, 1])) # Verify input_map has one", "assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10,", "@pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples", "np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config", "32)) s_bk_np = np.random.random((6, 32)) c_np = np.random.random((6, 3)) y_np", "model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs", "6)), epochs=1, batch_size=10) # compare to not using batch_input_shape test_input", "= wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model = Model([x, c],", "model.summary() # test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3,", "high=5, size=(10, 3, 4)) for i in range(4): model_input[i, i:,", "return_sequences=True)) state = layer(input1)[1:] # test passing invalid initial_state: passing", "layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 layer.trainable", "layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6", "= Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6,", "that mean and variance are 0 and 1. td =", "layer.trainable = True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x", "model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32') for i", "[model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input],", "K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X)", "import layers from keras.models import Sequential, Model, model_from_json from keras", "= Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10,", "concatenation of the underlying RNNs y_merged = y_merged[-n_states * 2:]", "one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel')", "input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4,", "y_merged = y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward =", "= model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights = model.get_weights() config =", "outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10,", "and 1. td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert", "return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10,", "Assert that mean and variance are 0 and 1. 
td", "assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x)", "== 8 assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) == 2", "model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input])", "= np.random.random((1, 3, 4)) test_output = model.predict(test_input) weights = model.layers[0].get_weights()", "to be set x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization())", "states, constants): [prev_output] = states [constant] = constants h_input =", "input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),", "# test return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units,", "is the concatenation of the underlying RNNs y_merged = y_merged[-n_states", "y_backward[0])) assert len(y_merged) == len(y_expected) + n_states * 2 for", "layer.trainable = False assert len(layer.updates) == 0 assert len(layer.trainable_weights) ==", "in outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2,", "K.function([model.input], mask_outputs) mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert", "test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units", "self.constant_kernel) output = h_input + h_state + h_const return output,", "s_for, s_bac, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np,", "== 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer =", "for x1, x2 in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def", "Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10,", "np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test wrapping Sequential model", "with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs", "BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2)))", "loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)]", "assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None))", "= layer([x, c]) model = Model([x, c], y) model.set_weights(weights) y_np_3", "name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built =", "model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop',", "layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop',", "wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) 
assert", "that need learning_phase to be set np.random.seed(1234) x = Input(shape=(3,", "recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y,", "3 units = 3 X = [np.random.rand(samples, timesteps, dim)] inputs", "dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid", "that mean and variance changed. assert not np.array_equal(td.get_weights()[2], np.array([0, 0]))", "= 2 * output_dim if mode == 'concat' else output_dim", "bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) == 4 assert len(layer.get_losses_for(None))", "not support TimeDistributed and RNN yet') def test_TimeDistributed_with_masking_layer(): # test", "# first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second", "model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and variance are 0", "y_rev elif merge_mode == 'ave': merge_func = lambda y, y_rev:", "= wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model =", "model.summary() # test stacked bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim,", "Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output", "* 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn,", "y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not supported", "with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage:", "[np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test", "= Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3,", "Test basic case serialization. 
x_np = np.random.random((6, 5, 5)) s_fw_np", "np.random.random((6, 32)) s_bk_np = np.random.random((6, 32)) c_np = np.random.random((6, 3))", "as K from keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet',", "mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) #", "reference_output, atol=1e-05) # test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5,", "assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train", "y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for,", "K from keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet", "dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs,", "keras.layers import RNN from keras import layers from keras.models import", "input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output,", "model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop',", "np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend()", "passing a tensor input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output", "y, epochs=1, batch_size=1) # Bidirectional and stateful inputs = Input(batch_shape=(1,", "c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights =", "RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def", "len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5)", "test_TimeDistributed_learning_phase(): # test layers that need learning_phase to be set", "= Input((5, 5)) c = Input((3,)) s_for = Input((32,)) s_bac", "from numpy.testing import assert_allclose from keras.utils import CustomObjectScope from keras.layers", "model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert", "== 4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0", "in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified", "= Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model", "outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'),", "model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test", "CNTK backend') def test_TimeDistributed_learning_phase(): # test layers that need learning_phase", "be set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x,", "outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x in outputs) model", "= object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 
assert uid in td._input_map", "return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode)", "len(y_merged) == len(y_expected) + n_states * 2 for x1, x2", "x2 in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn", "loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test", "far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1,", "dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) #", "layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses)", "loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert", "Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert", "batch_size=1) # Bidirectional and stateful inputs = Input(batch_shape=(1, timesteps, dim))", "+ n_states * 2 for x1, x2 in zip(y_merged, y_expected):", "Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10,", "y_rev), axis=-1) else: merge_func = lambda y, y_rev: [y, y_rev]", "@pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom RNN", "rnn = layers.SimpleRNN samples = 2 dim = 2 timesteps", "import assert_allclose from keras.utils import CustomObjectScope from keras.layers import wrappers,", "atol=1e-05) # test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2,", "# test with functional API x = Input(shape=(3, 2)) y", "RNN yet') def test_TimeDistributed_with_masking_layer(): # test with Masking layer model", "model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1, 2,", "as np import copy from numpy.testing import assert_allclose from keras.utils", "import CustomObjectScope from keras.layers import wrappers, Input, Layer from keras.layers", "test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units", "model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3,", "= Model(x, y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y), 0.,", "3, 3)), epochs=1, batch_size=10) # test wrapping Sequential model model", "'concat' else output_dim y = np.random.random((samples, target_dim)) # test with", "supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM", "Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:]", 
"model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5,", "model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop',", "3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) #", "np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model", "3 units = 3 X = [np.random.rand(samples, timesteps, dim)] if", "to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged) == len(y_expected) + n_states * 2", "test flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y", "reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn =", "backend does not support TimeDistributed and RNN yet') def test_TimeDistributed():", "batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input)", "recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1)", "[ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i])", "assert len(layer.trainable_weights) == 2 layer.trainable = False assert len(layer.updates) ==", "with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac],", "name='constant_kernel') self.built = True def call(self, inputs, states, constants): [prev_output]", "c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers that", "= np.random.randint(low=1, high=5, size=(10, 3, 4)) for i in range(4):", "x_np = np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np", "length rnn = layers.SimpleRNN samples = 2 dim = 2", "= Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2)))", "merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func", "model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop',", "y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged) ==", "= wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model =", "= False assert len(layer.updates) == 0 assert len(layer.trainable_weights) == 0", "elif merge_mode == 'mul': merge_func = lambda y, y_rev: y", "output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing a", "n_states = len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X) y_backward", "5)) c = Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants':", "layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, 
initial_state=[s_for, s_bac], constants=c) model", "5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6, 32))", "Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop',", "atol=1e-5) # test if the state of a BiRNN is", "the concatenation of the underlying RNNs y_merged = y_merged[-n_states *", "= layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False assert", "API x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model =", "merge_mode == 'sum': merge_func = lambda y, y_rev: y +", "scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10, 2))) #", "**kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def", "config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y =", "not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has one mapping", "size=(10, 3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10)", "y = np.random.random((samples, target_dim)) inputs = Input((None, dim)) outputs =", "rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed", "np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with BatchNormalization model", "s_bk_np = np.random.random((6, 32)) c_np = np.random.random((6, 3)) y_np =", "= y_backward[-n_states:] for state_birnn, state_inner in zip(y_merged, y_forward + y_backward):", "3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output,", "first, test with Dense layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3,", "= wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd')", "+ y_backward): assert_allclose(state_birnn, state_inner, atol=1e-5) @pytest.mark.skipif(K.backend() == 'theano' or K.backend()", "5 timesteps = 3 units = 3 X = [np.random.rand(samples,", "dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs)", "when specifying a batch_input_shape test_input = np.random.random((1, 3, 4)) test_output", "dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet',", "== 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode):", "layers from keras.models import Sequential, Model, model_from_json from keras import", "K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input > 0", "with dynamic length rnn = layers.SimpleRNN samples = 2 dim", "1 assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) == 1 model", "list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs", "wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for", "self.constant_kernel = self.add_weight( 
import pytest
import numpy as np
import copy
from numpy.testing import assert_allclose

from keras.utils import CustomObjectScope
from keras.layers import wrappers, Input, Layer, RNN
from keras import layers
from keras.models import Sequential, Model, model_from_json
from keras import backend as K
from keras.utils.generic_utils import object_list_uid, to_list


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_TimeDistributed():
    # first, test with Dense layer
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)),
              epochs=1, batch_size=10)

    # test config
    model.get_config()

    # test when specifying a batch_input_shape
    test_input = np.random.random((1, 3, 4))
    test_output = model.predict(test_input)
    weights = model.layers[0].get_weights()

    reference = Sequential()
    reference.add(wrappers.TimeDistributed(layers.Dense(2),
                                           batch_input_shape=(1, 3, 4)))
    reference.add(layers.Activation('relu'))
    reference.compile(optimizer='rmsprop', loss='mse')
    reference.layers[0].set_weights(weights)
    reference_output = reference.predict(test_input)
    assert_allclose(test_output, reference_output, atol=1e-05)

    # test with Embedding
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Embedding(5, 6),
                                       batch_input_shape=(10, 3, 4),
                                       dtype='int32'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'),
              np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10)

    # compare to not using batch_input_shape
    test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32')
    test_output = model.predict(test_input)
    weights = model.layers[0].get_weights()

    reference = Sequential()
    reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6),
                                           input_shape=(3, 4), dtype='int32'))
    reference.compile(optimizer='rmsprop', loss='mse')
    reference.layers[0].set_weights(weights)
    reference_output = reference.predict(test_input)
    assert_allclose(test_output, reference_output, atol=1e-05)

    # test with Conv2D
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'),
                                       input_shape=(2, 4, 4, 3)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(np.random.random((1, 2, 4, 4, 3)),
                         np.random.random((1, 2, 4, 4, 5)))

    model = model_from_json(model.to_json())
    model.summary()

    # test stacked layers
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))
    model.add(wrappers.TimeDistributed(layers.Dense(3)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)),
              epochs=1, batch_size=10)

    # test wrapping Sequential model
    model = Sequential()
    model.add(layers.Dense(3, input_dim=2))
    outer_model = Sequential()
    outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2)))
    outer_model.compile(optimizer='rmsprop', loss='mse')
    outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),
                    epochs=1, batch_size=10)

    # test with functional API
    x = Input(shape=(3, 2))
    y = wrappers.TimeDistributed(model)(x)
    outer_model = Model(x, y)
    outer_model.compile(optimizer='rmsprop', loss='mse')
    outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),
                    epochs=1, batch_size=10)


@pytest.mark.skipif((K.backend() == 'mxnet' or K.backend() == 'cntk'),
                    reason='Flaky with CNTK backend')
def test_TimeDistributed_learning_phase():
    # test layers that need learning_phase to be set
    np.random.seed(1234)
    x = Input(shape=(3, 2))
    y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True)
    model = Model(x, y)
    y = model.predict(np.random.random((10, 3, 2)))
    assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_TimeDistributed_trainable():
    # test layers that need learning_phase to be set
    x = Input(shape=(3, 2))
    layer = wrappers.TimeDistributed(layers.BatchNormalization())
    _ = layer(x)
    assert len(layer.updates) == 2
    assert len(layer.trainable_weights) == 2
    layer.trainable = False
    assert len(layer.updates) == 0
    assert len(layer.trainable_weights) == 0
    layer.trainable = True
    assert len(layer.updates) == 2
    assert len(layer.trainable_weights) == 2


@pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'),
                    reason='Unknown timestamps not supported in CNTK and MXNet.')
def test_TimeDistributed_with_masked_embedding_and_unspecified_shape():
    # test with unspecified shape and Embeddings with mask_zero
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True),
                                       input_shape=(None, None)))
    # the shape so far: (N, t_1, t_2, 6)
    model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True)))
    model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False)))
    model.add(layers.SimpleRNN(1, return_sequences=False))
    model.compile(optimizer='rmsprop', loss='mse')
    model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32')
    for i in range(4):
        model_input[i, i:, i:] = 0
    model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10)
    mask_outputs = [model.layers[0].compute_mask(model.input)]
    for layer in model.layers[1:]:
        mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1]))
    func = K.function([model.input], mask_outputs[:-1])
    mask_outputs_val = func([model_input])
    ref_mask_val_0 = model_input > 0                   # embedding layer
    ref_mask_val_1 = ref_mask_val_0                    # first RNN layer
    ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1)   # second RNN layer
    ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2]
    for i in range(3):
        assert np.array_equal(mask_outputs_val[i], ref_mask_val[i])
    assert mask_outputs[-1] is None  # final layer


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_TimeDistributed_with_masking_layer():
    # test with Masking layer
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.),
                                       input_shape=(None, 4)))
    model.add(wrappers.TimeDistributed(layers.Dense(5)))
    model.compile(optimizer='rmsprop', loss='mse')
    model_input = np.random.randint(low=1, high=5, size=(10, 3, 4))
    for i in range(4):
        model_input[i, i:, :] = 0.
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6)
    mask_outputs = [model.layers[0].compute_mask(model.input)]
    mask_outputs += [model.layers[1].compute_mask(model.layers[1].input,
                                                  mask_outputs[-1])]
    func = K.function([model.input], mask_outputs)
    mask_outputs_val = func([model_input])
    assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1))
    assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1))


def test_TimeDistributed_batchnorm():
    # test with BatchNormalization
    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.BatchNormalization(center=True, scale=True),
        name='bn', input_shape=(10, 2)))
    model.compile(optimizer='rmsprop', loss='mse')
    # Assert that mean and variance are 0 and 1.
    td = model.layers[0]
    assert np.array_equal(td.get_weights()[2], np.array([0, 0]))
    assert np.array_equal(td.get_weights()[3], np.array([1, 1]))
    # Train
    model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)),
                         np.broadcast_to(np.array([0, 1]), (1, 10, 2)))
    # Assert that mean and variance changed.
    assert not np.array_equal(td.get_weights()[2], np.array([0, 0]))
    assert not np.array_equal(td.get_weights()[3], np.array([1, 1]))
    # Verify input_map has one mapping from inputs to reshaped inputs.
    uid = object_list_uid(model.inputs)
    assert uid in td._input_map
    assert K.int_shape(td._input_map[uid]) == (None, 2)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_regularizers():
    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    assert len(model.layers[0].layer.losses) == 1
    assert len(model.layers[0].losses) == 1
    assert len(model.layers[0].get_losses_for(None)) == 1
    assert len(model.losses) == 1

    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    assert len(model.losses) == 1


def test_Bidirectional():
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))

        # test with Sequential model
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test config
        model.get_config()
        model = model_from_json(model.to_json())
        model.summary()

        # test stacked bidirectional layers
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test with functional API
        inputs = Input((timesteps, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # Bidirectional and stateful
        inputs = Input(batch_shape=(1, timesteps, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support variable length inputs yet')
def test_Bidirectional_dynamic_timesteps():
    # test with functional API with dynamic length
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))
        inputs = Input((None, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)


@pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])
def test_Bidirectional_merged_value(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    if merge_mode == 'sum':
        merge_func = lambda y, y_rev: y + y_rev
    elif merge_mode == 'mul':
        merge_func = lambda y, y_rev: y * y_rev
    elif merge_mode == 'ave':
        merge_func = lambda y, y_rev: (y + y_rev) / 2
    elif merge_mode == 'concat':
        merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)
    else:
        merge_func = lambda y, y_rev: [y, y_rev]

    # basic case
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_sequences=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], to_list(layer(inputs)))
    f_forward = K.function([inputs], [layer.forward_layer.call(inputs)])
    f_backward = K.function([inputs],
                            [K.reverse(layer.backward_layer.call(inputs), 1)])

    y_merged = f_merged(X)
    y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0]))
    assert len(y_merged) == len(y_expected)
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test return_state
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], layer(inputs))
    f_forward = K.function([inputs], layer.forward_layer.call(inputs))
    f_backward = K.function([inputs], layer.backward_layer.call(inputs))
    n_states = len(layer.layer.states)

    y_merged = f_merged(X)
    y_forward = f_forward(X)
    y_backward = f_backward(X)
    y_expected = to_list(merge_func(y_forward[0], y_backward[0]))
    assert len(y_merged) == len(y_expected) + n_states * 2
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test if the state of a BiRNN is the concatenation of the
    # underlying RNNs
    y_merged = y_merged[-n_states * 2:]
    y_forward = y_forward[-n_states:]
    y_backward = y_backward[-n_states:]
    for state_birnn, state_inner in zip(y_merged, y_forward + y_backward):
        assert_allclose(state_birnn, state_inner, atol=1e-5)


@pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet',
                    reason='Not supported.')
@pytest.mark.parametrize('merge_mode', ['sum', 'concat', None])
def test_Bidirectional_dropout(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2,
                                         recurrent_dropout=0.2),
                                     merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs, training=True))
    assert all(not getattr(x, '_uses_learning_phase') for x in outputs)

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True),
                                     merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs))
    assert all(x._uses_learning_phase for x in outputs)

    model = Model(inputs, outputs)
    assert model.uses_learning_phase
    y1 = to_list(model.predict(X))
    y2 = to_list(model.predict(X))
    for x1, x2 in zip(y1, y2):
        assert_allclose(x1, x2, atol=1e-5)


@pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet',
                    reason='Not supported.')
def test_Bidirectional_state_reuse():
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3

    input1 = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True,
                                       return_sequences=True))
    state = layer(input1)[1:]

    # test passing invalid initial_state: passing a tensor
    input2 = Input((timesteps, dim))
    with pytest.raises(ValueError):
        output = wrappers.Bidirectional(rnn(units))(input2,
                                                    initial_state=state[0])

    # test valid usage: passing a list
    output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state)
    model = Model([input1, input2], output)
    inputs = [np.random.rand(samples, timesteps, dim),
              np.random.rand(samples, timesteps, dim)]
    outputs = model.predict(inputs)


class RNNCellWithConstants(Layer):
    # Minimal custom cell shared by the two Bidirectional-with-constants
    # tests below.
    def __init__(self, units, **kwargs):
        self.units = units
        self.state_size = units
        super(RNNCellWithConstants, self).__init__(**kwargs)

    def build(self, input_shape):
        if not isinstance(input_shape, list):
            raise TypeError('expects constants shape')
        [input_shape, constant_shape] = input_shape
        # will (and should) raise if more than one constant passed
        self.input_kernel = self.add_weight(
            shape=(input_shape[-1], self.units),
            initializer='uniform', name='kernel')
        self.recurrent_kernel = self.add_weight(
            shape=(self.units, self.units),
            initializer='uniform', name='recurrent_kernel')
        self.constant_kernel = self.add_weight(
            shape=(constant_shape[-1], self.units),
            initializer='uniform', name='constant_kernel')
        self.built = True

    def call(self, inputs, states, constants):
        [prev_output] = states
        [constant] = constants
        h_input = K.dot(inputs, self.input_kernel)
        h_state = K.dot(prev_output, self.recurrent_kernel)
        h_const = K.dot(constant, self.constant_kernel)
        output = h_input + h_state + h_const
        return output, [output]

    def get_config(self):
        config = {'units': self.units}
        base_config = super(RNNCellWithConstants, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support custom RNN cell yet')
def test_Bidirectional_with_constants():
    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, c])
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support custom RNN cell yet')
def test_Bidirectional_with_constants_layer_passing_initial_state():
    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    s_for = Input((32,))
    s_bac = Input((32,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 32)),
         np.zeros((6, 32)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    s_fw_np = np.random.random((6, 32))
    s_bk_np = np.random.random((6, 32))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()

    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # verify that state is used
    y_np_2_different_s = model.predict([x_np, s_fw_np + 10.,
                                        s_bk_np + 10., c_np])
    with pytest.raises(AssertionError):
        assert_allclose(y_np, y_np_2_different_s, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, s_for, s_bac, c])
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support RNN yet')
def test_Bidirectional_trainable():
    # test layers that need learning_phase to be set
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(layers.SimpleRNN(3))
    _ = layer(x)
    assert len(layer.trainable_weights) == 6
    layer.trainable = False
    assert len(layer.trainable_weights) == 0
    layer.trainable = True
    assert len(layer.trainable_weights) == 6


def test_Bidirectional_updates():
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(layers.SimpleRNN(3))
    assert len(layer.updates) == 0
    assert len(layer.get_updates_for(None)) == 0
    assert len(layer.get_updates_for(x)) == 0
    layer.forward_layer.add_update(0, inputs=x)
    layer.forward_layer.add_update(1, inputs=None)
    layer.backward_layer.add_update(0, inputs=x)
    layer.backward_layer.add_update(1, inputs=None)
    assert len(layer.updates) == 4
    assert len(layer.get_updates_for(None)) == 2
    assert len(layer.get_updates_for(x)) == 2


def test_Bidirectional_losses():
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(
        layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1'))
    _ = layer(x)
    assert len(layer.losses) == 4
    assert len(layer.get_losses_for(None)) == 4
    assert len(layer.get_losses_for(x)) == 0
    layer.forward_layer.add_loss(0, inputs=x)
    layer.forward_layer.add_loss(1, inputs=None)
    layer.backward_layer.add_loss(0, inputs=x)
    layer.backward_layer.add_loss(1, inputs=None)
    assert len(layer.losses) == 8
    assert len(layer.get_losses_for(None)) == 6
    assert len(layer.get_losses_for(x)) == 2


if __name__ == '__main__':
    pytest.main([__file__])
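# The assertions above lean on the core TimeDistributed contract: the wrapped
# layer is applied independently, with shared weights, to every timestep. A
# minimal standalone sketch of that equivalence (not part of the test-suite;
# it assumes the same Keras 2.x API used above):
import numpy as np
from keras import layers
from keras.models import Sequential

model = Sequential()
model.add(layers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))
model.compile(optimizer='rmsprop', loss='mse')

x = np.random.random((8, 3, 4))
y = model.predict(x)  # shape (8, 3, 2): one Dense output per timestep

# Flattening the time axis, applying the shared kernel once, and restoring
# the shape gives the same result, since weights are shared across timesteps.
w, b = model.layers[0].get_weights()
y_ref = (x.reshape(-1, 4).dot(w) + b).reshape(8, 3, 2)
np.testing.assert_allclose(y, y_ref, atol=1e-5)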
[ "from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS independent", "socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] !=", "# assigned (host, port) pair try: self.writer.connect(connect_address) break # success", "first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html #", "def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError,", "= self.reader.recv(1024) if not result: break except (IOError, socket.error): pass", "write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error):", "0 while 1: count += 1 # Bind to a", "WinXP Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1.", "port for us. # Unfortunately, stress tests showed that we", "use on platforms that don't have os.pipe() (or where pipes", "= self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno()", "# never triggered in Tim's tests if count >= 10:", "while True: result = self.reader.recv(1024) if not result: break except", "pass def consume(self): try: while True: result = self.reader.recv(1024) if", "but it didn't appear to help or hurt. a.close() self.reader,", "(not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already", "again. Note: I originally put a short # sleep() here,", "# and we want that sent immediately, to wake up", "the Windows socket implementation. # So we loop until a", "originally put a short # sleep() here, but it didn't", "wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while", "# be able to connect to that port (\"Address already", "'Address already in use') # assert count <= 2 #", "count >= 10: # I've never seen it go above", "import socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an", "a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port)", "we may not # be able to connect to that", "have sockets. This includes Windows and Jython. \"\"\" def __init__(self):", "Jython. \"\"\" def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py", "if not result: break except (IOError, socket.error): pass def close(self):", "__init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket()", "hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd", "SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise #", "byte, # and we want that sent immediately, to wake", "Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already in", "self.writer.connect(connect_address) break # success except socket.error as detail: if (not", "above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close", "seen on two WinXP Pro SP2 boxes, under # Pythons", "2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a`", "connect to that port (\"Address already in # use\") despite", "1 # Bind to a local port; for efficiency, let", "have os.pipe() (or where pipes cannot be passed to select()),", "\"\"\"Create an OS independent asynchronous pipe. For use on platforms", "errno import socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create", "it. 
This appears # to be a race bug in", "self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count +=", "detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\" is the", "fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try:", "OS pick # a free port for us. # Unfortunately,", "# Close `a` and try again. Note: I originally put", "and Jython. \"\"\" def __init__(self): # Based on Zope async.py:", "the OS picked it. This appears # to be a", "hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already in", "the first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html", "Close `a` and try again. Note: I originally put a", "This includes Windows and Jython. \"\"\" def __init__(self): # Based", "(IOError, socket.error): pass def consume(self): try: while True: result =", "and try again. Note: I originally put a short #", "on platforms that don't have os.pipe() (or where pipes cannot", "result: break except (IOError, socket.error): pass def close(self): self.reader.close() self.writer.close()", "here, but it didn't appear to help or hurt. a.close()", "bug in the Windows socket implementation. # So we loop", "a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() #", "in use\" is the only error # I've seen on", "appear to help or hurt. a.close() self.reader, addr = a.accept()", "a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self):", "a free port for us. # Unfortunately, stress tests showed", "as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE):", "= socket.socket() # Disable buffering -- pulling the trigger sends", "triggered in Tim's tests if count >= 10: # I've", "under # Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address", "it didn't appear to help or hurt. a.close() self.reader, addr", "socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS", "errno.WSAEADDRINUSE): # \"Address already in use\" is the only error", "# \"Address already in use\" is the only error #", "import interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe.", "seen it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind", "Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable", "in # use\") despite that the OS picked it. This", "2 # never triggered in Tim's tests if count >=", "Tim's tests if count >= 10: # I've never seen", "it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\")", "# success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE')", "didn't appear to help or hurt. a.close() self.reader, addr =", "Windows and Jython. 
\"\"\" def __init__(self): # Based on Zope", "\"\"\"Lowest-common-denominator implementations of platform functionality.\"\"\" from __future__ import absolute_import, division,", "port) pair try: self.writer.connect(connect_address) break # success except socket.error as", "pair try: self.writer.connect(connect_address) break # success except socket.error as detail:", "absolute_import, division, print_function, with_statement import errno import socket from tornado.platform", "put a short # sleep() here, but it didn't appear", "count = 0 while 1: count += 1 # Bind", "a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd =", "# I've seen on two WinXP Pro SP2 boxes, under", "\"Address already in use\" is the only error # I've", "detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): #", "# sleep() here, but it didn't appear to help or", "# on the first try). See the long thread at", "<= 2 # never triggered in Tim's tests if count", "self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno()", "Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise", "the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details.", "I've seen on two WinXP Pro SP2 boxes, under #", "ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count", "assigned (host, port) pair try: self.writer.connect(connect_address) break # success except", "True: result = self.reader.recv(1024) if not result: break except (IOError,", "# Disable buffering -- pulling the trigger sends 1 byte,", "two WinXP Pro SP2 boxes, under # Pythons 2.3.5 and", "efficiency, let the OS pick # a free port for", "self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def", "tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous", "we want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP,", "Note: I originally put a short # sleep() here, but", "(\"Address already in # use\") despite that the OS picked", "functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement import errno", "Windows socket implementation. # So we loop until a connect()", "never triggered in Tim's tests if count >= 10: #", "assert count <= 2 # never triggered in Tim's tests", "class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For use", "on two WinXP Pro SP2 boxes, under # Pythons 2.3.5", "not result: break except (IOError, socket.error): pass def close(self): self.reader.close()", "So we loop until a connect() succeeds (almost always #", "use') # assert count <= 2 # never triggered in", "# use\") despite that the OS picked it. This appears", "\"\"\" def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer", "the only error # I've seen on two WinXP Pro", "I've never seen it go above 2 a.close() self.writer.close() raise", "in the Windows socket implementation. # So we loop until", "up ASAP. 
self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1:", "where pipes cannot be passed to select()), but do have", "0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair", "<gh_stars>100-1000 \"\"\"Lowest-common-denominator implementations of platform functionality.\"\"\" from __future__ import absolute_import,", "result = self.reader.recv(1024) if not result: break except (IOError, socket.error):", "a connect() succeeds (almost always # on the first try).", "2.4.1. raise # (10048, 'Address already in use') # assert", "be a race bug in the Windows socket implementation. #", "that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)", "-- pulling the trigger sends 1 byte, # and we", "a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a` and", "until a connect() succeeds (almost always # on the first", "an OS independent asynchronous pipe. For use on platforms that", "sends 1 byte, # and we want that sent immediately,", "= socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned", "connect() succeeds (almost always # on the first try). See", "let the OS pick # a free port for us.", "on the first try). See the long thread at #", "break # success except socket.error as detail: if (not hasattr(errno,", "that the OS picked it. This appears # to be", "on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering", "consume(self): try: while True: result = self.reader.recv(1024) if not result:", "able to connect to that port (\"Address already in #", "independent asynchronous pipe. For use on platforms that don't have", "succeeds (almost always # on the first try). See the", "return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass", "already in # use\") despite that the OS picked it.", "a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return", "# Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already", "to that port (\"Address already in # use\") despite that", "(almost always # on the first try). See the long", "to be a race bug in the Windows socket implementation.", "# to be a race bug in the Windows socket", "platforms that don't have os.pipe() (or where pipes cannot be", "(or where pipes cannot be passed to select()), but do", "count <= 2 # never triggered in Tim's tests if", "to a local port; for efficiency, let the OS pick", "be able to connect to that port (\"Address already in", "is the only error # I've seen on two WinXP", "1 byte, # and we want that sent immediately, to", "(10048, 'Address already in use') # assert count <= 2", "long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a", "want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,", "= a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self):", "never seen it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot", "of platform functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement", "os.pipe() (or where pipes cannot be passed to select()), but", "# I've never seen it go above 2 a.close() self.writer.close()", "for us. 
# Unfortunately, stress tests showed that we may", "try: while True: result = self.reader.recv(1024) if not result: break", "already in use\" is the only error # I've seen", "try again. Note: I originally put a short # sleep()", "in use') # assert count <= 2 # never triggered", ">= 10: # I've never seen it go above 2", "self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a` and try", "bind trigger!\") # Close `a` and try again. Note: I", "__future__ import absolute_import, division, print_function, with_statement import errno import socket", "self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return", "self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def", "# So we loop until a connect() succeeds (almost always", "for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address", "port; for efficiency, let the OS pick # a free", "tests showed that we may not # be able to", "a short # sleep() here, but it didn't appear to", "if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address", "use\") despite that the OS picked it. This appears #", "use\" is the only error # I've seen on two", "def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer =", "implementations of platform functionality.\"\"\" from __future__ import absolute_import, division, print_function,", "OS independent asynchronous pipe. For use on platforms that don't", "try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try: while", "not # be able to connect to that port (\"Address", "despite that the OS picked it. This appears # to", "if count >= 10: # I've never seen it go", "help or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0)", "thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a =", "Disable buffering -- pulling the trigger sends 1 byte, #", "success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or", "Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering --", "# (10048, 'Address already in use') # assert count <=", "try: self.writer.connect(connect_address) break # success except socket.error as detail: if", "platform functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement import", "select()), but do have sockets. This includes Windows and Jython.", "already in use') # assert count <= 2 # never", "For use on platforms that don't have os.pipe() (or where", "be passed to select()), but do have sockets. This includes", "http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0))", "that we may not # be able to connect to", "socket.error(\"Cannot bind trigger!\") # Close `a` and try again. 
Note:", "self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try: while True:", "don't have os.pipe() (or where pipes cannot be passed to", "# assert count <= 2 # never triggered in Tim's", "buffering -- pulling the trigger sends 1 byte, # and", "# Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() #", "the trigger sends 1 byte, # and we want that", "def consume(self): try: while True: result = self.reader.recv(1024) if not", "# Unfortunately, stress tests showed that we may not #", "sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count", "OS picked it. This appears # to be a race", "count += 1 # Bind to a local port; for", "we loop until a connect() succeeds (almost always # on", "trigger!\") # Close `a` and try again. Note: I originally", "us. # Unfortunately, stress tests showed that we may not", "except (IOError, socket.error): pass def consume(self): try: while True: result", "in Tim's tests if count >= 10: # I've never", "print_function, with_statement import errno import socket from tornado.platform import interface", "def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self):", "except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0]", "the OS pick # a free port for us. #", "1) count = 0 while 1: count += 1 #", "port (\"Address already in # use\") despite that the OS", "includes Windows and Jython. \"\"\" def __init__(self): # Based on", "race bug in the Windows socket implementation. # So we", "Bind to a local port; for efficiency, let the OS", "to connect to that port (\"Address already in # use\")", "1: count += 1 # Bind to a local port;", "socket implementation. # So we loop until a connect() succeeds", "cannot be passed to select()), but do have sockets. This", "and we want that sent immediately, to wake up ASAP.", "for efficiency, let the OS pick # a free port", "sockets. This includes Windows and Jython. \"\"\" def __init__(self): #", "loop until a connect() succeeds (almost always # on the", "raise socket.error(\"Cannot bind trigger!\") # Close `a` and try again.", "that don't have os.pipe() (or where pipes cannot be passed", "Unfortunately, stress tests showed that we may not # be", "# a free port for us. # Unfortunately, stress tests", "self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno()", "I originally put a short # sleep() here, but it", "self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def", "immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count =", "# for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1)", "self.reader.recv(1024) if not result: break except (IOError, socket.error): pass def", "= 0 while 1: count += 1 # Bind to", "appears # to be a race bug in the Windows", "def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self):", "to select()), but do have sockets. This includes Windows and", "# Bind to a local port; for efficiency, let the", "pulling the trigger sends 1 byte, # and we want", "may not # be able to connect to that port", "Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For use on", "pipe. 
For use on platforms that don't have os.pipe() (or", "+= 1 # Bind to a local port; for efficiency,", "passed to select()), but do have sockets. This includes Windows", "addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def", "hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address =", "10: # I've never seen it go above 2 a.close()", "a local port; for efficiency, let the OS pick #", "tests if count >= 10: # I've never seen it", "'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\"", "socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host,", "# http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind((\"127.0.0.1\",", "sleep() here, but it didn't appear to help or hurt.", "do have sockets. This includes Windows and Jython. \"\"\" def", "only error # I've seen on two WinXP Pro SP2", "with_statement import errno import socket from tornado.platform import interface class", "return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\")", "self.writer = socket.socket() # Disable buffering -- pulling the trigger", "or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close()", "http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling the", "that port (\"Address already in # use\") despite that the", "socket.TCP_NODELAY, 1) count = 0 while 1: count += 1", "See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous", "to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0", "wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try:", "connect_address = a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address)", "2.3.5 and 2.4.1. raise # (10048, 'Address already in use')", "interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For", "try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for", "asynchronous pipe. For use on platforms that don't have os.pipe()", "but do have sockets. This includes Windows and Jython. \"\"\"", "!= errno.WSAEADDRINUSE): # \"Address already in use\" is the only", "(host, port) pair try: self.writer.connect(connect_address) break # success except socket.error", "from __future__ import absolute_import, division, print_function, with_statement import errno import", "and 2.4.1. raise # (10048, 'Address already in use') #", "async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling", "local port; for efficiency, let the OS pick # a", "a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair try:", "`a` and try again. Note: I originally put a short", "socket.socket() # Disable buffering -- pulling the trigger sends 1", "to help or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0)", "socket.error): pass def consume(self): try: while True: result = self.reader.recv(1024)", "pick # a free port for us. 
# Unfortunately, stress", "trigger sends 1 byte, # and we want that sent", "stress tests showed that we may not # be able", "go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") #", "while 1: count += 1 # Bind to a local", "or detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\" is", "boxes, under # Pythons 2.3.5 and 2.4.1. raise # (10048,", "self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except", "short # sleep() here, but it didn't appear to help", "import absolute_import, division, print_function, with_statement import errno import socket from", "raise # (10048, 'Address already in use') # assert count", "details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname()", "at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket()", "picked it. This appears # to be a race bug", "= a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break", "a race bug in the Windows socket implementation. # So", "pipes cannot be passed to select()), but do have sockets.", "free port for us. # Unfortunately, stress tests showed that", "error # I've seen on two WinXP Pro SP2 boxes,", "implementation. # So we loop until a connect() succeeds (almost", "showed that we may not # be able to connect", "always # on the first try). See the long thread", "division, print_function, with_statement import errno import socket from tornado.platform import", "import errno import socket from tornado.platform import interface class Waker(interface.Waker):", "This appears # to be a race bug in the", "a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break #" ]
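# A minimal sketch of how a Waker is consumed (illustrative wiring only, not
# Tornado's actual IOLoop code): the reader end sits in select() alongside
# the loop's other descriptors, wake() interrupts the blocking call from any
# thread, and consume() drains the pipe afterwards.
import select
import threading
import time

waker = Waker()

def loop_once(timeout):
    # The reader socket participates in select() like any other fd.
    readable, _, _ = select.select([waker.fileno()], [], [], timeout)
    if readable:
        waker.consume()  # drain the wake-up byte(s)
        return True      # woken early
    return False         # timeout expired

# Another thread can interrupt the blocking select() at any time:
threading.Timer(0.1, waker.wake).start()
start = time.time()
print("woken early:", loop_once(timeout=5.0),
      "after %.2fs" % (time.time() - start))
waker.close()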
[ "pathymetric data. The file current named mapserv.png was obtained using", "\"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"])", "if args: print(\"Unknown parameters: \" + args) # grab the", "strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to", "options.portname loc = options.location rate = options.update_rate delta = options.delta_latitude", "sys MAX_ERRORS = 3 num_errors = 0 # Obtain default", "different formats. cols, rows = im.size a = np.asarray(im) #", "print(\"Next latitude: \" + str(loc[0])) # grab the applicable pixel", "del a del im # Tape resets to stored pattern", "be 0, but this will result in a never-changing LEDs.", "dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\",", "2) samples a 'number of LEDs' number of pixels from", "pattern after a few seconds of inactivity sleep(rate * 60)", "str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \" +", "longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel", "1, max(0, (int)(((loc[1] - -180) / (180 - -180)) *", "f: config = json.load(f) # Default Blinky Tape port on", "of shape (rows, cols, channels) # map loc latitude to", "overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \" +", "strip. Defaults to (0, 0) -u/--update-interval: int Update interval of", "script will modulate the blinky lights using the following algorithm:", "import Image import numpy as np import sys MAX_ERRORS =", "+ args) # grab the values provided by user (or", "following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments,", "with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default Blinky", "of pixel data from bathy image 2) samples a 'number", "PNG image that contains the color coded pathymetric data. The", "edit the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/')", "load image im = Image.open(i_name) # Can be many different", "import optparse import json from blinkytape import BlinkyTape from time", "- 1, max(0, (int)(((loc[0] - -90) / (90 - -90))", "parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile (mins)", "to center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index,", "= options.image_name # Some visual indication that it works, for", "'COM5'. -d/--delta_latitude: int Vertical change in latitude every update rate.", "pixel in output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel))", "center of the LED strip. 
Defaults to (0, 0) -u/--update-interval:", "bt for pixel in output_pixels: print(\"Sending r: {}, g: {},", "number of pixels from that row 3) shifts the sampled", "data bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error", "row 3) shifts the sampled row data to center it", "+ str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \"", "+ str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab the", "(C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import", "the script, in minutes. Defaults to 10. -p/--port: str Serial", "take loc[0] += delta loc[0] = ((loc[0] + 90) %", "axis=0) # send all pixel data to bt for pixel", "Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending", "minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError", "wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse", "bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: # first,", "the LED strip. Defaults to (0, 0) -u/--update-interval: int Update", "memory management del a del im # Tape resets to", "for headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0,", "(0, 0) -u/--update-interval: int Update interval of the script, in", "blinkytape import BlinkyTape from time import sleep from PIL import", "uses user-provided location to obtain row of pixel data from", "works, for headless setups (green tape) bt = BlinkyTape(port, n_leds)", "sleeps for user-specified period of time Uses the following arguments:", "for user-specified period of time Uses the following arguments: -l/--location:", "/ (180 - -180)) * (cols - 0) + 0)))", "indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that", "str(loc[0])) # grab the applicable pixel indices indices = [(int)(x*(cols/n_leds))", "-90) / (90 - -90)) * (rows - 0) +", "= options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some", "x in range(n_leds)] # sample that row of pixel data", "specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all", "Location of the user in tuple(lat, lon). This represents the", "90) % 180) - 90 # wraps to next pole", "arguments, you may alternatively edit the defaults in bath_config.json. NOTE:", "Can be many different formats. cols, rows = im.size a", "port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'.", "the row to center around the specified longitude output_pixels =", "after a few seconds of inactivity sleep(rate * 60) #", "\"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"])", "5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update", "any incomplete data bt.show() num_errors += 1 if num_errors >", "(ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown", "BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. 
-d/--delta_latitude: int Vertical", "# Wait specified number of minutes # sleep(10) # Wait", "map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if", "* (rows - 0) + 0))) longitude_index = min(cols -", "60) # Wait specified number of minutes # sleep(10) #", "pixels from that row 3) shifts the sampled row data", "Vertical change in latitude every update rate. May be 0,", "print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index))", "default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile", "print(\"Unknown parameters: \" + args) # grab the values provided", "from that row 3) shifts the sampled row data to", "= optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"])", "(ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry", "the image bt.show() # delete variables for memory management del", "num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error count exceeds", "by a given latitude, also specified by user 6) sleeps", "(mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude", "# Tape resets to stored pattern after a few seconds", "longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180) /", "line arguments, you may alternatively edit the defaults in bath_config.json.", "while True: try: # first, load image im = Image.open(i_name)", "represents the center of the LED strip. Defaults to (0,", "'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in", "sleep(10) # Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard", "mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu", "latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number", "the color coded pathymetric data. The file current named mapserv.png", "resets to stored pattern after a few seconds of inactivity", "Update interval of the script, in minutes. Defaults to 10.", "in latitude every update rate. 
May be 0, but this", "row by a given latitude, also specified by user 6)", "dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location", "# of shape (rows, cols, channels) # map loc latitude", "grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x", "config = json.load(f) # Default Blinky Tape port on Raspberry", "given latitude, also specified by user 6) sleeps for user-specified", "minutes # sleep(10) # Wait specified number of minutes except", "on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\",", "https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you may", "update the location of the next row of elevation data", "= options.location rate = options.update_rate delta = options.delta_latitude n_leds =", "- -180)) * (cols - 0) + 0))) # update", "as f: config = json.load(f) # Default Blinky Tape port", "many different formats. cols, rows = im.size a = np.asarray(im)", "a = np.asarray(im) # of shape (rows, cols, channels) #", "\"\"\" This script will modulate the blinky lights using the", "Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser", "Name of the PNG image that contains the color coded", "\" + e.args[0]) # flush any incomplete data bt.show() num_errors", "# update the location of the next row of elevation", "user-specified period of time Uses the following arguments: -l/--location: tuple", "((loc[0] + 90) % 180) - 90 # wraps to", "user-provided location to obtain row of pixel data from bathy", "100, 0) bt.show() sleep(2) while True: try: # first, load", "the center of the LED strip. Defaults to (0, 0)", "user 6) sleeps for user-specified period of time Uses the", "= BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True:", "program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime error: \"", "first, load image im = Image.open(i_name) # Can be many", "Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to", "{}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the", "user in tuple(lat, lon). 
This represents the center of the", "file current named mapserv.png was obtained using the following API:", "Blinky Tape 5) shifts next row by a given latitude,", "pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] #", "loc latitude to 0-based index latitude_index = min(rows - 1,", "(or defaults) port = options.portname loc = options.location rate =", "args) = parser.parse_args() if args: print(\"Unknown parameters: \" + args)", "= 3 num_errors = 0 # Obtain default parameters with", "np import sys MAX_ERRORS = 3 num_errors = 0 #", "# Some visual indication that it works, for headless setups", "longitude_index, axis=0) # send all pixel data to bt for", "json.load(f) # Default Blinky Tape port on Raspberry Pi is", "map loc latitude to 0-based index latitude_index = min(rows -", "the center of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\",", "'number of LEDs' number of pixels from that row 3)", "Defaults to (0, 0) -u/--update-interval: int Update interval of the", "The file current named mapserv.png was obtained using the following", "options.image_name # Some visual indication that it works, for headless", "all pixel data to bt for pixel in output_pixels: print(\"Sending", "update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\",", "current named mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0", "- 0) + 0))) longitude_index = min(cols - 1, max(0,", "a 'number of LEDs' number of pixels from that row", "n_leds = options.num_leds i_name = options.image_name # Some visual indication", "Image import numpy as np import sys MAX_ERRORS = 3", "next row by a given latitude, also specified by user", "runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import", "(rows, cols, channels) # map loc latitude to 0-based index", "data from bathy image 2) samples a 'number of LEDs'", "it at the location specified by user 4) displays resulting", "LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name", "0) -u/--update-interval: int Update interval of the script, in minutes.", "KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as e:", "of the PNG image that contains the color coded pathymetric", "in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME>", "the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send", "channels) # map loc latitude to 0-based index latitude_index =", "rows = im.size a = np.asarray(im) # of shape (rows,", "the blinky lights using the following algorithm: 1) uses user-provided", "pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the", "output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row to", "the location of the next row of elevation data to", "be many different formats. 
cols, rows = im.size a =", "specified by user 6) sleeps for user-specified period of time", "of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\",", "row to center around the specified longitude output_pixels = np.roll(output_pixels,", "few seconds of inactivity sleep(rate * 60) # Wait specified", "help=\"Location of the center of the LED strip (ex: 70,-110)\",", "dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options,", "to 0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0]", "rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name", "# map loc latitude to 0-based index latitude_index = min(rows", "default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex:", "-u/--update-interval: int Update interval of the script, in minutes. Defaults", "modulate the blinky lights using the following algorithm: 1) uses", "API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you", "- -90)) * (rows - 0) + 0))) longitude_index =", "following arguments: -l/--location: tuple Location of the user in tuple(lat,", "using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command", "tuple Location of the user in tuple(lat, lon). 
This represents", "in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\",", "5) shifts next row by a given latitude, also specified", "/dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex:", "pole if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index:", "dest=\"update_rate\", help=\"How often to update elevation profile (mins) (ex: 5)\",", "default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex:", "0) + 0))) # update the location of the next", "0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config", "port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\",", "open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default Blinky Tape", "str Name of the PNG image that contains the color", "rotate the row to center around the specified longitude output_pixels", "from PIL import Image import numpy as np import sys", "/ (90 - -90)) * (rows - 0) + 0)))", "row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) #", "named mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In", "# Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config =", "70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation", "options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some visual", "True: try: # first, load image im = Image.open(i_name) #", "0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0] -", "= [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row", "options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name", "flush any incomplete data bt.show() num_errors += 1 if num_errors", "= json.load(f) # Default Blinky Tape port on Raspberry Pi", "pixels on Blinky Tape 5) shifts next row by a", "image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args:", "= parser.parse_args() if args: print(\"Unknown parameters: \" + args) #", "of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude:", "This represents the center of the LED strip. Defaults to", "help=\"How often to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"])", "update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs", "contains the color coded pathymetric data. 
The file current named", "0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180)", "port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the", "Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial", "-180) / (180 - -180)) * (cols - 0) +", "(green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show()", "default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex:", "a given latitude, also specified by user 6) sleeps for", "help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args)", "loc[0] += delta loc[0] = ((loc[0] + 90) % 180)", "90 # wraps to next pole if overflow print(\"Lat index:", "0) bt.show() sleep(2) while True: try: # first, load image", "\" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude:", "the values provided by user (or defaults) port = options.portname", "= ((loc[0] + 90) % 180) - 90 # wraps", "of the user in tuple(lat, lon). This represents the center", "wraps to next pole if overflow print(\"Lat index: \" +", "pixel data from bathy image 2) samples a 'number of", "the sampled row data to center it at the location", "(rows - 0) + 0))) longitude_index = min(cols - 1,", "# Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt,", "Image.open(i_name) # Can be many different formats. cols, rows =", "# delete variables for memory management del a del im", "sleep from PIL import Image import numpy as np import", "of minutes # sleep(10) # Wait specified number of minutes", "rate. May be 0, but this will result in a", "you may alternatively edit the defaults in bath_config.json. NOTE: runs", "+= 1 if num_errors > MAX_ERRORS: sys.exit(\"Error count exceeds that", "specified number of minutes # sleep(10) # Wait specified number", "Tape port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser()", "(ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in", "latitude every update rate. May be 0, but this will", "in minutes. Defaults to 10. -p/--port: str Serial port of", "parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\",", "bathy image 2) samples a 'number of LEDs' number of", "may alternatively edit the defaults in bath_config.json. NOTE: runs via:", "but this will result in a never-changing LEDs. -i/--image: str", "location of the next row of elevation data to take", "default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown parameters: \"", "+= delta loc[0] = ((loc[0] + 90) % 180) -", "location specified by user 4) displays resulting pixels on Blinky", "seconds of inactivity sleep(rate * 60) # Wait specified number", "3 num_errors = 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\")", "index: \" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next", "= Image.open(i_name) # Can be many different formats. 
cols, rows", "\" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab", "del im # Tape resets to stored pattern after a", "options.num_leds i_name = options.image_name # Some visual indication that it", "optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\",", "that row 3) shifts the sampled row data to center", "max(0, (int)(((loc[1] - -180) / (180 - -180)) * (cols", "5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip", "if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \"", "of the center of the LED strip (ex: 70,-110)\", default=config[\"location\"])", "runtime error: \" + e.args[0]) # flush any incomplete data", "management del a del im # Tape resets to stored", "# first, load image im = Image.open(i_name) # Can be", "args) # grab the values provided by user (or defaults)", "never-changing LEDs. -i/--image: str Name of the PNG image that", "# send all pixel data to bt for pixel in", "# wraps to next pole if overflow print(\"Lat index: \"", "dest=\"location\", help=\"Location of the center of the LED strip (ex:", "0, but this will result in a never-changing LEDs. -i/--image:", "Defaults to 10. -p/--port: str Serial port of the BlinkyLight", "import sys MAX_ERRORS = 3 num_errors = 0 # Obtain", "1) uses user-provided location to obtain row of pixel data", "formats. cols, rows = im.size a = np.asarray(im) # of", "r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show", "(cols - 0) + 0))) # update the location of", "0) + 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1]", "print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered", "{}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show()", "+ 0))) # update the location of the next row", "Uses the following arguments: -l/--location: tuple Location of the user", "providing command line arguments, you may alternatively edit the defaults", "im.size a = np.asarray(im) # of shape (rows, cols, channels)", "data to center it at the location specified by user", "e: print(\"Encountered runtime error: \" + e.args[0]) # flush any", "change in latitude every update rate. May be 0, but", "index latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90)", "result in a never-changing LEDs. -i/--image: str Name of the", "parser.parse_args() if args: print(\"Unknown parameters: \" + args) # grab", "parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of the LED", "BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try:", "often to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\",", "\" + str(loc[0])) # grab the applicable pixel indices indices", "-i/--image: str Name of the PNG image that contains the", "g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image", "import numpy as np import sys MAX_ERRORS = 3 num_errors", "LED strip. 
Defaults to (0, 0) -u/--update-interval: int Update interval", "-180)) * (cols - 0) + 0))) # update the", "bt.sendPixel(*pixel) # finally, show the image bt.show() # delete variables", "# flush any incomplete data bt.show() num_errors += 1 if", "interrupt, ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime", "# Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0", "displays resulting pixels on Blinky Tape 5) shifts next row", "data. The file current named mapserv.png was obtained using the", "to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\",", "json from blinkytape import BlinkyTape from time import sleep from", "for memory management del a del im # Tape resets", "of inactivity sleep(rate * 60) # Wait specified number of", "of elevation data to take loc[0] += delta loc[0] =", "center of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\",", "{}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() # delete", "except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as", "help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\",", "= np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to", "print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0]))", "4) displays resulting pixels on Blinky Tape 5) shifts next", "PIL import Image import numpy as np import sys MAX_ERRORS", "grab the values provided by user (or defaults) port =", "in tuple(lat, lon). This represents the center of the LED", "indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample", "output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel data", "values provided by user (or defaults) port = options.portname loc", "interval of the script, in minutes. Defaults to 10. -p/--port:", "specified by user 4) displays resulting pixels on Blinky Tape", "sys.exit() except RuntimeError as e: print(\"Encountered runtime error: \" +", "(https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json from blinkytape", "'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in latitude", "headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100,", "# rotate the row to center around the specified longitude", "this will result in a never-changing LEDs. -i/--image: str Name", "image im = Image.open(i_name) # Can be many different formats.", "delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name #", "loc = options.location rate = options.update_rate delta = options.delta_latitude n_leds", "parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default", "args: print(\"Unknown parameters: \" + args) # grab the values", "for x in range(n_leds)] # sample that row of pixel", "the next row of elevation data to take loc[0] +=", "= np.take(a[latitude_index], indices, axis=0) # rotate the row to center", "np.take(a[latitude_index], indices, axis=0) # rotate the row to center around", "of pixels from that row 3) shifts the sampled row", "time import sleep from PIL import Image import numpy as", "try: # first, load image im = Image.open(i_name) # Can", "to stored pattern after a few seconds of inactivity sleep(rate", "a never-changing LEDs. 
-i/--image: str Name of the PNG image", "the following arguments: -l/--location: tuple Location of the user in", "MAX_ERRORS = 3 num_errors = 0 # Obtain default parameters", "LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often", "to bt for pixel in output_pixels: print(\"Sending r: {}, g:", "lights using the following algorithm: 1) uses user-provided location to", "show the image bt.show() # delete variables for memory management", "using the following algorithm: 1) uses user-provided location to obtain", "- 1, max(0, (int)(((loc[1] - -180) / (180 - -180))", "to 'COM5'. -d/--delta_latitude: int Vertical change in latitude every update", "str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab the applicable", "number of minutes # sleep(10) # Wait specified number of", "NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT", "n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: #", "applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)]", "default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) #", "int Vertical change in latitude every update rate. May be", "latitude to 0-based index latitude_index = min(rows - 1, max(0,", "Some visual indication that it works, for headless setups (green", "help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\",", "obtain row of pixel data from bathy image 2) samples", "delta loc[0] = ((loc[0] + 90) % 180) - 90", "of the script, in minutes. Defaults to 10. -p/--port: str", "= options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name =", "(int)(((loc[1] - -180) / (180 - -180)) * (cols -", "the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in", "blinky lights using the following algorithm: 1) uses user-provided location", "\"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile (mins) (ex:", "BlinkyTape from time import sleep from PIL import Image import", "of LEDs' number of pixels from that row 3) shifts", "color coded pathymetric data. The file current named mapserv.png was", "tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2)", "time Uses the following arguments: -l/--location: tuple Location of the", "image 2) samples a 'number of LEDs' number of pixels", "latitude: \" + str(loc[0])) # grab the applicable pixel indices", "cols, channels) # map loc latitude to 0-based index latitude_index", "num_errors = 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as", "In lieu of providing command line arguments, you may alternatively", "latitude, also specified by user 6) sleeps for user-specified period", "the PNG image that contains the color coded pathymetric data.", "- 0) + 0))) # update the location of the", "of providing command line arguments, you may alternatively edit the", "provided by user (or defaults) port = options.portname loc =", "user (or defaults) port = options.portname loc = options.location rate", "a del im # Tape resets to stored pattern after", "int Update interval of the script, in minutes. 
Defaults to", "axis=0) # rotate the row to center around the specified", "import json from blinkytape import BlinkyTape from time import sleep", "row data to center it at the location specified by", "image bt.show() # delete variables for memory management del a", "np.asarray(im) # of shape (rows, cols, channels) # map loc", "row of pixel data from bathy image 2) samples a", "<NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json from", "next row of elevation data to take loc[0] += delta", "e.args[0]) # flush any incomplete data bt.show() num_errors += 1", "= im.size a = np.asarray(im) # of shape (rows, cols,", "b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() #", "\"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\",", "-d/--delta_latitude: int Vertical change in latitude every update rate. May", "+ e.args[0]) # flush any incomplete data bt.show() num_errors +=", "defaults) port = options.portname loc = options.location rate = options.update_rate", "is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port", "location to obtain row of pixel data from bathy image", "\"\"\" import optparse import json from blinkytape import BlinkyTape from", "on Blinky Tape 5) shifts next row by a given", "(e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change", "shape (rows, cols, channels) # map loc latitude to 0-based", "parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\",", "indices, axis=0) # rotate the row to center around the", "bt.show() sleep(2) while True: try: # first, load image im", "will modulate the blinky lights using the following algorithm: 1)", "update rate. May be 0, but this will result in", "visual indication that it works, for headless setups (green tape)", "also specified by user 6) sleeps for user-specified period of", "-p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3').", "of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\",", "LEDs' number of pixels from that row 3) shifts the", "that contains the color coded pathymetric data. The file current", "ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime error:", "as np import sys MAX_ERRORS = 3 num_errors = 0", "sleep(2) while True: try: # first, load image im =", "variables for memory management del a del im # Tape", "period of time Uses the following arguments: -l/--location: tuple Location", "command line arguments, you may alternatively edit the defaults in", "= min(rows - 1, max(0, (int)(((loc[0] - -90) / (90", "sample that row of pixel data output_pixels = np.take(a[latitude_index], indices,", "RuntimeError as e: print(\"Encountered runtime error: \" + e.args[0]) #", "at the location specified by user 4) displays resulting pixels", "alternatively edit the defaults in bath_config.json. 
NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py',", "data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row", "index: \" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) #", "by user (or defaults) port = options.portname loc = options.location", "- -180) / (180 - -180)) * (cols - 0)", "from bathy image 2) samples a 'number of LEDs' number", "from time import sleep from PIL import Image import numpy", "(options, args) = parser.parse_args() if args: print(\"Unknown parameters: \" +", "send all pixel data to bt for pixel in output_pixels:", "by user 6) sleeps for user-specified period of time Uses", "\"--location\", dest=\"location\", help=\"Location of the center of the LED strip", "elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change", "np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to bt", "1 if num_errors > MAX_ERRORS: sys.exit(\"Error count exceeds that allowed.\")", "specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\")", "= np.asarray(im) # of shape (rows, cols, channels) # map", "number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit()", "indication that it works, for headless setups (green tape) bt", "to center it at the location specified by user 4)", "finally, show the image bt.show() # delete variables for memory", "of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate", "every update rate. May be 0, but this will result", "sampled row data to center it at the location specified", "user 4) displays resulting pixels on Blinky Tape 5) shifts", "latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90) /", "next pole if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon", "range(n_leds)] # sample that row of pixel data output_pixels =", "resulting pixels on Blinky Tape 5) shifts next row by", "* (cols - 0) + 0))) # update the location", "lieu of providing command line arguments, you may alternatively edit", "+ 90) % 180) - 90 # wraps to next", "parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\",", "setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0)", "+ 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] -", "the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. 
-d/--delta_latitude: int", "shifts next row by a given latitude, also specified by", "- -90) / (90 - -90)) * (rows - 0)", "center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0)", "Tape 5) shifts next row by a given latitude, also", "bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while", "stored pattern after a few seconds of inactivity sleep(rate *", "* 60) # Wait specified number of minutes # sleep(10)", "[(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row of", "by user 4) displays resulting pixels on Blinky Tape 5)", "in range(n_leds)] # sample that row of pixel data output_pixels", "This script will modulate the blinky lights using the following", "shifts the sampled row data to center it at the", "data to take loc[0] += delta loc[0] = ((loc[0] +", "that row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0)", "optparse import json from blinkytape import BlinkyTape from time import", "defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021", "to obtain row of pixel data from bathy image 2)", "inactivity sleep(rate * 60) # Wait specified number of minutes", "LEDs. -i/--image: str Name of the PNG image that contains", "the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line", "port = options.portname loc = options.location rate = options.update_rate delta", "3) shifts the sampled row data to center it at", "Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in latitude every", "min(cols - 1, max(0, (int)(((loc[1] - -180) / (180 -", "(ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during", "elevation data to take loc[0] += delta loc[0] = ((loc[0]", "was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of", "- 90 # wraps to next pole if overflow print(\"Lat", "im # Tape resets to stored pattern after a few", "./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown parameters:", "bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev)", "\" + args) # grab the values provided by user", "delete variables for memory management del a del im #", "dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\",", "the user in tuple(lat, lon). This represents the center of", "center it at the location specified by user 4) displays", "around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) #", "coded pathymetric data. 
The file current named mapserv.png was obtained", "import BlinkyTape from time import sleep from PIL import Image", "MIT Licensed \"\"\" import optparse import json from blinkytape import", "= 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f:", "a few seconds of inactivity sleep(rate * 60) # Wait", "% 180) - 90 # wraps to next pole if", "options.location rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds", "print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally,", "to next pole if overflow print(\"Lat index: \" + str(latitude_index))", "# Can be many different formats. cols, rows = im.size", "2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json", "# finally, show the image bt.show() # delete variables for", "strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the", "will result in a never-changing LEDs. -i/--image: str Name of", "import sleep from PIL import Image import numpy as np", "samples a 'number of LEDs' number of pixels from that", "tuple(lat, lon). This represents the center of the LED strip.", "+ str(loc[0])) # grab the applicable pixel indices indices =", "sleep(rate * 60) # Wait specified number of minutes #", "# grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for", "from blinkytape import BlinkyTape from time import sleep from PIL", "it works, for headless setups (green tape) bt = BlinkyTape(port,", "in a never-changing LEDs. -i/--image: str Name of the PNG", "parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\",", "i_name = options.image_name # Some visual indication that it works,", "image that contains the color coded pathymetric data. The file", "the location specified by user 4) displays resulting pixels on", "of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except", "numpy as np import sys MAX_ERRORS = 3 num_errors =", "during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of", "except RuntimeError as e: print(\"Encountered runtime error: \" + e.args[0])", "bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error count", "# sample that row of pixel data output_pixels = np.take(a[latitude_index],", "(ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center", "of the LED strip. Defaults to (0, 0) -u/--update-interval: int", "= options.num_leds i_name = options.image_name # Some visual indication that", "to 10. 
-p/--port: str Serial port of the BlinkyLight (e.g.,", "6) sleeps for user-specified period of time Uses the following", "that it works, for headless setups (green tape) bt =", "obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing", "incomplete data bt.show() num_errors += 1 if num_errors > MAX_ERRORS:", "(180 - -180)) * (cols - 0) + 0))) #", "to take loc[0] += delta loc[0] = ((loc[0] + 90)", "parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\",", "(ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update", "1, max(0, (int)(((loc[0] - -90) / (90 - -90)) *", "May be 0, but this will result in a never-changing", "print(\"Encountered runtime error: \" + e.args[0]) # flush any incomplete", "10. -p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0',", "max(0, (int)(((loc[0] - -90) / (90 - -90)) * (rows", "lon). This represents the center of the LED strip. Defaults", "row of elevation data to take loc[0] += delta loc[0]", "the following algorithm: 1) uses user-provided location to obtain row", "minutes. Defaults to 10. -p/--port: str Serial port of the", "runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed", "data to bt for pixel in output_pixels: print(\"Sending r: {},", "60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image", "help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of", "error: \" + e.args[0]) # flush any incomplete data bt.show()", "of the next row of elevation data to take loc[0]", "-l/--location: tuple Location of the user in tuple(lat, lon). This", "to (0, 0) -u/--update-interval: int Update interval of the script,", "the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C)", "parameters: \" + args) # grab the values provided by", "0))) # update the location of the next row of", "algorithm: 1) uses user-provided location to obtain row of pixel", "the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args()", "180) - 90 # wraps to next pole if overflow", "= options.portname loc = options.location rate = options.update_rate delta =", "as e: print(\"Encountered runtime error: \" + e.args[0]) # flush", "Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f)", "\"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"])", "loc[0] = ((loc[0] + 90) % 180) - 90 #", "for pixel in output_pixels: print(\"Sending r: {}, g: {}, b:", "the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How", "im = Image.open(i_name) # Can be many different formats. 
cols,", "pixel data to bt for pixel in output_pixels: print(\"Sending r:", "default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of the", "Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser =", "min(rows - 1, max(0, (int)(((loc[0] - -90) / (90 -", "in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of", "= min(cols - 1, max(0, (int)(((loc[1] - -180) / (180", "of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) =", "-90)) * (rows - 0) + 0))) longitude_index = min(cols", "of time Uses the following arguments: -l/--location: tuple Location of", "following algorithm: 1) uses user-provided location to obtain row of", "Wait specified number of minutes # sleep(10) # Wait specified", "in output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel)", "bt.show() # delete variables for memory management del a del", "# grab the values provided by user (or defaults) port", "via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\"", "str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults", "/dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of", "Licensed \"\"\" import optparse import json from blinkytape import BlinkyTape", "output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) #", "(int)(((loc[0] - -90) / (90 - -90)) * (rows -", "(90 - -90)) * (rows - 0) + 0))) longitude_index", "Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\",", "arguments: -l/--location: tuple Location of the user in tuple(lat, lon).", "cols, rows = im.size a = np.asarray(im) # of shape", "Tape resets to stored pattern after a few seconds of", "script, in minutes. Defaults to 10. -p/--port: str Serial port", "profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in", "# sleep(10) # Wait specified number of minutes except KeyboardInterrupt:" ]
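The script reads its defaults from bathy_config.json with json.load, keyed by port, location, update_rate, delta_latitude, num_leds, and image. The sketch below writes a minimal config with those keys; the values are placeholder assumptions, not the project's shipped defaults.

# Minimal sketch of a bathy_config.json the script above could load.
# Keys mirror the config[...] lookups in the option defaults; the values
# here are placeholder assumptions, not the project's shipped defaults.
import json

example_config = {
    "port": "/dev/ttyACM0",    # serial port of the Blinky Tape
    "location": [0, 0],        # [lat, lon] at the center of the LED strip
    "update_rate": 10,         # minutes between elevation updates
    "delta_latitude": 5,       # degrees the sampled row shifts each update
    "num_leds": 60,            # number of LEDs in the strip
    "image": "./mapserv.png",  # bathymetry image to sample colors from
}

with open("./bathymetry_blink/bathy_config.json", "w") as f:
    json.dump(example_config, f, indent=2)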
<filename>service/transforms/export_submissions.py
""" Export Submissions Transform module """
#pylint: disable=too-few-public-methods
import pandas as pd
from .transform import TransformBase
from ..resources.field_configs import FieldConfigs
from ..resources.field_maps import FieldMaps

class ExportSubmissionsTransform(TransformBase):
    """ Transform for Export Submissions """
    def transform(self, data, sep):
        """ transform submissions from export """
        output = list(map(self.get_data, data))
        output = list(map(self.pretty_format, output))
        output = [i for i in output if i is not None]
        output = self.normalize(output)
        output = self.to_csv(output, sep)
        return output

    # pylint: disable=R0201
    def get_data(self, submission):
        """ Get data from submission object """
        # skip permit type = existingPermitApplication submissions
        #pylint: disable=too-many-nested-blocks
        if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication':
            output = {}
            data = submission['data']
            output['id'] = submission['_id']
            output['created'] = submission['created']
            #pylint: disable=too-many-nested-blocks
            for key in data:
                # flatten list values
                if isinstance(data[key], list):
                    if len(data[key]) > 0:
                        if isinstance(data[key][0], (int, str)):
                            output[key] = ', '.join(map(str, data[key]))
                        else:
                            file_names = []
                            for index, val in enumerate(data[key]):
                                # if storage, concat filename
                                if 'storage' in val and 'originalName' in val:
                                    file_names.append(val['originalName'])
                                else:
                                    output[key+str(index+1)] = val
                            if len(file_names) > 0:
                                output[key] = ', '.join(file_names)
                # flatten multi select values
                elif isinstance(data[key], dict):
                    # building use code needs manual process
                    if FieldConfigs.is_building_use(key):
                        output[key] = self.convert_building_use(key, data[key], data)
                    # flatten nested address fields
                    elif FieldConfigs.is_nested_address_field(key):
                        output = self.convert_address_fields(key, data[key], output)
                    else:
                        multi_selects = []
                        for multi_key, multi_value in data[key].items():
                            if multi_value:
                                multi_selects.append(multi_key)
                        output[key] = ', '.join(multi_selects)
                else:
                    output[key] = data[key]
            return output

    def normalize(self, data):
        """ Normalize data into a flat DataFrame structure """
        dataframe = pd.json_normalize(data)
        # update column names
        dataframe.rename(columns=self.pretty_string, inplace=True)
        return dataframe

    def to_csv(self, dataframe, sep=','):
        """ Return CSV from DataFrame """
        return dataframe.to_csv(index=False, sep=sep, line_terminator='\r\n')

    def pretty_format(self, data):
        """ Pretty format data fields """
        output = {}
        if data:
            data = self.set_pts_fields(data)
            for key in data:
                if self.datetime_valid(data[key]):
                    output[key] = self.pretty_time(data[key])
                else:
                    field_key = FieldConfigs.get_field_key(key, 'map')
                    phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty')
                    if field_key is not None:
                        output[key] = FieldMaps.map_key_value(field_key, data[key])
                        # manually add Fire Rating and proposed Fire Rating
                        if field_key == 'construction_type' and data[key] != '':
                            output = self.add_fire_rating(key, data[key], output)
                    # format phone numbers and building application number
                    elif phone_appnum_key is not None:
                        if phone_appnum_key == 'phone_fields':
                            output[key] = self.pretty_phonenumber(data[key])
                    # cleanse characters that break the csv
                    elif isinstance(data[key], (str, bytes)):
                        output[key] = data[key].replace('\n', '\t').replace('|', '')
                    # relabel field, if necessary
                    relabel_field = FieldConfigs.get_relabel_fields(key)
                    if relabel_field:
                        output[relabel_field] = output.pop(key)
            output = self.reorder_fields(output)
            return output
"else: file_names = [] for index, val in enumerate(data[key]): #", "'.join(map(str, data[key])) else: file_names = [] for index, val in", "'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is not None:", "format phone numbers and building application number elif phone_appnum_key is", "if isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0], (int,", "def transform(self, data, sep): \"\"\" transform submissions from export \"\"\"", "phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that", "proposed Fire Rating if field_key == 'construction_type' and data[key] !=", "Rating if field_key == 'construction_type' and data[key] != '': output", "output = self.add_fire_rating(key, data[key], output) # format phone numbers and", "output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break the csv", "disable=too-many-nested-blocks for key in data: # flatten list values if", "= list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output = [i", "output def normalize(self, data): \"\"\" Normalize data into a flat", "TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import FieldMaps class", "= self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key,", "from export \"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format,", "for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] =", "format data fields \"\"\" output = {} if data: data", "multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ',", "from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data):", "= [] for index, val in enumerate(data[key]): # if storage,", "= self.to_csv(output, sep) return output # pylint: disable=R0201 def get_data(self,", "self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key])", "if field_key == 'construction_type' and data[key] != '': output =", "\"\"\" Get data from submission object \"\"\" # skip permit", "in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) >", "data: data = self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]):", "'storage' in val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)]", "'\\t').replace('|', '') # relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key)", "= self.normalize(output) output = self.to_csv(output, sep) return output # pylint:", "\"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output", "FieldConfigs from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for", "output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in data: #", "enumerate(data[key]): # if storage, concat filename if 'storage' in val", "else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if", "def to_csv(self, dataframe, sep=','): \"\"\" Return CSV from DataFrame \"\"\"", "if i is not None] output = self.normalize(output) output =", "relabel 
field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field]", "[] for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key]", "#pylint: disable=too-many-nested-blocks for key in data: # flatten list values", "in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key =", "pandas as pd from .transform import TransformBase from ..resources.field_configs import", "if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters" ]
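The fragments in the list above are overlapping shingles of a single Python module, an "Export Submissions Transform" built on pandas. Read together, they describe a pipeline that flattens each submission dict, prettifies field values, normalizes the rows with pandas, and emits CSV. The sketch below is a condensed, best-effort reassembly for readability only; it is not runnable on its own, since TransformBase, FieldConfigs and FieldMaps are project-internal modules that the fragments merely reference, and the get_data/pretty_format bodies are abridged.

""" Export Submissions Transform module """
#pylint: disable=too-few-public-methods
import pandas as pd
from .transform import TransformBase
from ..resources.field_configs import FieldConfigs
from ..resources.field_maps import FieldMaps


class ExportSubmissionsTransform(TransformBase):
    """ Transform for Export Submissions """

    def transform(self, data, sep):
        """ transform submissions from export """
        output = list(map(self.get_data, data))         # flatten each submission dict
        output = list(map(self.pretty_format, output))  # map codes to labels, format dates/phones
        output = [i for i in output if i is not None]   # drop skipped submissions
        output = self.normalize(output)
        output = self.to_csv(output, sep)
        return output

    # Abridged here; the fragments above carry the full logic:
    # get_data() copies submission['_id'] / ['created'], joins list values,
    # flattens multi-selects and nested address fields via FieldConfigs, and
    # only builds a row when permitType is set and is not
    # 'existingPermitApplication' (skipped rows fall out via the None filter).
    # pretty_format() applies FieldMaps.map_key_value, pretty_time and
    # pretty_phonenumber, cleanses characters that break the CSV, relabels
    # fields, then calls reorder_fields().

    def normalize(self, data):
        """ Normalize data into a flat structure into DataFrame """
        dataframe = pd.json_normalize(data)
        # update column names
        dataframe.rename(columns=self.pretty_string, inplace=True)
        return dataframe

    def to_csv(self, dataframe, sep=','):
        """ Return CSV from DataFrame """
        return dataframe.to_csv(index=False, sep=sep, line_terminator='\r\n')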
[ "yield address_info # The code after the yield will run", "= trainer.fit() # If loading from a state dict, a", "def test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config): result = linear_train_func(config) assert len(result)", "= ray.data.range(3) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert", "assert len(result) == epochs assert result[-1][\"loss\"] < result[0][\"loss\"] num_workers =", "import TorchPredictor from ray.ml.train.integrations.torch import TorchTrainer from ray import train", "from ray import train from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func", "trainer.fit() predict_dataset = ray.data.range(3) class TorchScorer: def __init__(self): self.pred =", "= trainer.fit() predict_dataset = ray.data.range(3) class TorchScorer: def __init__(self): self.pred", "loading from a state dict, a model definition must be", "{\"lr\": 1e-2, \"hidden_size\": 1, \"batch_size\": 4, \"epochs\": epochs} trainer =", "predictions.count() == 3 if __name__ == \"__main__\": import pytest import", "= ray.data.range(3) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def", "TorchTrainer from ray import train from ray.ml.examples.pytorch.torch_linear_example import train_func as", "after the yield will run as teardown code. ray.shutdown() @pytest.mark.parametrize(\"num_workers\",", "class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1)", "# The code after the yield will run as teardown", "train from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func @pytest.fixture def ray_start_4_cpus():", "self.pred.predict(x, dtype=torch.float) predict_dataset = ray.data.range(3) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\",", ") trainer.fit() def test_torch_e2e(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1)", "ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield address_info # The code after", "result[0][\"loss\"] num_workers = num_workers epochs = 3 scaling_config = {\"num_workers\":", "train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, ) trainer.fit() def test_torch_e2e(ray_start_4_cpus): def train_func(): model", "predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() ==", "= TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predictions =", "TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() predict_dataset = ray.data.range(3)", "import train from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func @pytest.fixture def", "predict_dataset = ray.data.range(3) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint)", "result.checkpoint, model=torch.nn.Linear(1, 1) ) def __call__(self, x): return self.pred.predict(x, dtype=torch.float)", "trainer.fit() def test_torch_e2e(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1) train.save_checkpoint(model=model)", "result = trainer.fit() # If loading from a state dict,", "a state dict, a model definition must be passed in.", "model = torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config = 
{\"num_workers\": 2} trainer", "trainer = TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, ) trainer.fit() def test_torch_e2e(ray_start_4_cpus):", "\"hidden_size\": 1, \"batch_size\": 4, \"epochs\": epochs} trainer = TorchTrainer( train_loop_per_worker=train_func,", "dtype=torch.float) predict_dataset = ray.data.range(3) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\"", "must be passed in. with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def", "trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() #", "dtype=torch.float) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count()", "= TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() predict_dataset =", "__init__(self): self.pred = TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1) ) def __call__(self,", "from ray.ml.predictors.integrations.torch import TorchPredictor from ray.ml.train.integrations.torch import TorchTrainer from ray", "address_info # The code after the yield will run as", "= torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer =", "def train_func(config): result = linear_train_func(config) assert len(result) == epochs assert", "@pytest.mark.parametrize(\"num_workers\", [1, 2]) def test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config): result =", "batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() == 3 def test_torch_e2e_state_dict(ray_start_4_cpus): def", "passed in. with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def __init__(self): self.pred", "def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predict_dataset = ray.data.range(3) predictions", "compute=\"actors\" ) assert predictions.count() == 3 def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func():", "result[-1][\"loss\"] < result[0][\"loss\"] num_workers = num_workers epochs = 3 scaling_config", "2]) def test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config): result = linear_train_func(config) assert", "definition must be passed in. 
with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer:", "train_func(): model = torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2}", "train_func as linear_train_func @pytest.fixture def ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield", "num_workers): def train_func(config): result = linear_train_func(config) assert len(result) == epochs", ") result = trainer.fit() predict_dataset = ray.data.range(3) class TorchScorer: def", "import train_func as linear_train_func @pytest.fixture def ray_start_4_cpus(): address_info = ray.init(num_cpus=4)", "pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint( result.checkpoint,", "ray.data.range(3) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self,", "<reponame>mgelbart/ray<gh_stars>10-100 import pytest import torch import ray from ray.ml.predictors.integrations.torch import", ") result = trainer.fit() # If loading from a state", ") assert predictions.count() == 3 def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model", "code. ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1, 2]) def test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config):", "TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1) ) def __call__(self, x): return self.pred.predict(x,", "== epochs assert result[-1][\"loss\"] < result[0][\"loss\"] num_workers = num_workers epochs", "= {\"lr\": 1e-2, \"hidden_size\": 1, \"batch_size\": 4, \"epochs\": epochs} trainer", "The code after the yield will run as teardown code.", "= {\"num_workers\": num_workers} config = {\"lr\": 1e-2, \"hidden_size\": 1, \"batch_size\":", ") def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predict_dataset = ray.data.range(3)", "predict_dataset = ray.data.range(3) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" )", "def __init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x): return self.pred.predict(x,", "= TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1) ) def __call__(self, x): return", "TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predictions = predict_dataset.map_batches(", "TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x): return", "as teardown code. 
ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1, 2]) def test_torch_linear(ray_start_4_cpus, num_workers):", "result = linear_train_func(config) assert len(result) == epochs assert result[-1][\"loss\"] <", "class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x):", "TorchPredictor from ray.ml.train.integrations.torch import TorchTrainer from ray import train from", "linear_train_func @pytest.fixture def ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield address_info #", "ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1, 2]) def test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config): result", "3 def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model)", "# If loading from a state dict, a model definition", "as linear_train_func @pytest.fixture def ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield address_info", "code after the yield will run as teardown code. ray.shutdown()", "1e-2, \"hidden_size\": 1, \"batch_size\": 4, \"epochs\": epochs} trainer = TorchTrainer(", "TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1,", "scaling_config=scaling_config, ) trainer.fit() def test_torch_e2e(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1,", "scaling_config=scaling_config ) result = trainer.fit() predict_dataset = ray.data.range(3) class TorchScorer:", "train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() # If loading from", "= num_workers epochs = 3 scaling_config = {\"num_workers\": num_workers} config", "self.pred.predict(x, dtype=torch.float) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert", "def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config", "TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() == 3 if __name__", "{\"num_workers\": 2} trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result =", "__name__ == \"__main__\": import pytest import sys sys.exit(pytest.main([\"-v\", \"-x\", __file__]))", "def train_func(): model = torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config = {\"num_workers\":", "import ray from ray.ml.predictors.integrations.torch import TorchPredictor from ray.ml.train.integrations.torch import TorchTrainer", "def test_torch_e2e(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config", "\"batch_size\": 4, \"epochs\": epochs} trainer = TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config,", "ray.data.range(3) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count()", "4, \"epochs\": epochs} trainer = TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, )", "epochs} trainer = TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, ) trainer.fit() def", "from ray.ml.train.integrations.torch import TorchTrainer from ray import train from 
ray.ml.examples.pytorch.torch_linear_example", "def train_func(): model = torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config = {\"num_workers\":", "predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() == 3 def", "assert predictions.count() == 3 def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model =", "train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config", "torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer = TorchTrainer(", "= {\"num_workers\": 2} trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result", "the yield will run as teardown code. ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1,", "train_func(): model = torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2}", "return self.pred.predict(x, dtype=torch.float) predict_dataset = ray.data.range(3) predictions = predict_dataset.map_batches( TorchScorer,", "ray.ml.train.integrations.torch import TorchTrainer from ray import train from ray.ml.examples.pytorch.torch_linear_example import", "test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config =", "1).state_dict() train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer = TorchTrainer( train_loop_per_worker=train_func,", "dict, a model definition must be passed in. with pytest.raises(ValueError):", "ray.init(num_cpus=4) yield address_info # The code after the yield will", "{\"num_workers\": num_workers} config = {\"lr\": 1e-2, \"hidden_size\": 1, \"batch_size\": 4,", "assert predictions.count() == 3 if __name__ == \"__main__\": import pytest", "@pytest.fixture def ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield address_info # The", "return self.pred.predict(x, dtype=torch.float) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" )", "num_workers = num_workers epochs = 3 scaling_config = {\"num_workers\": num_workers}", "= 3 scaling_config = {\"num_workers\": num_workers} config = {\"lr\": 1e-2,", "x): return self.pred.predict(x, dtype=torch.float) predict_dataset = ray.data.range(3) predictions = predict_dataset.map_batches(", "train_func(config): result = linear_train_func(config) assert len(result) == epochs assert result[-1][\"loss\"]", "= torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer =", "num_workers epochs = 3 scaling_config = {\"num_workers\": num_workers} config =", "3 scaling_config = {\"num_workers\": num_workers} config = {\"lr\": 1e-2, \"hidden_size\":", "TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() == 3 def test_torch_e2e_state_dict(ray_start_4_cpus):", "test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config): result = linear_train_func(config) assert len(result) ==", "1) ) def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predict_dataset =", "train_loop_config=config, scaling_config=scaling_config, ) trainer.fit() def test_torch_e2e(ray_start_4_cpus): def train_func(): model =", "yield will run as teardown code. 
ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1, 2])", "config = {\"lr\": 1e-2, \"hidden_size\": 1, \"batch_size\": 4, \"epochs\": epochs}", "x): return self.pred.predict(x, dtype=torch.float) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\"", "be passed in. with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def __init__(self):", "predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() == 3 if", "address_info = ray.init(num_cpus=4) yield address_info # The code after the", "self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predictions", "scaling_config=scaling_config ) result = trainer.fit() # If loading from a", "train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() predict_dataset = ray.data.range(3) class", "1) train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer = TorchTrainer( train_loop_per_worker=train_func,", "self.pred = TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1) ) def __call__(self, x):", "TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1) )", "__call__(self, x): return self.pred.predict(x, dtype=torch.float) predict_dataset = ray.data.range(3) predictions =", "trainer.fit() # If loading from a state dict, a model", "ray.ml.predictors.integrations.torch import TorchPredictor from ray.ml.train.integrations.torch import TorchTrainer from ray import", "model = torch.nn.Linear(1, 1).state_dict() train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer", "scaling_config = {\"num_workers\": num_workers} config = {\"lr\": 1e-2, \"hidden_size\": 1,", "TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() # If loading", "1, \"batch_size\": 4, \"epochs\": epochs} trainer = TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config,", "from a state dict, a model definition must be passed", "will run as teardown code. ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1, 2]) def", "\"epochs\": epochs} trainer = TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, ) trainer.fit()", "< result[0][\"loss\"] num_workers = num_workers epochs = 3 scaling_config =", "if __name__ == \"__main__\": import pytest import sys sys.exit(pytest.main([\"-v\", \"-x\",", "model=torch.nn.Linear(1, 1) ) def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predict_dataset", "__call__(self, x): return self.pred.predict(x, dtype=torch.float) predictions = predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\",", "def __init__(self): self.pred = TorchPredictor.from_checkpoint( result.checkpoint, model=torch.nn.Linear(1, 1) ) def", "3 if __name__ == \"__main__\": import pytest import sys sys.exit(pytest.main([\"-v\",", "linear_train_func(config) assert len(result) == epochs assert result[-1][\"loss\"] < result[0][\"loss\"] num_workers", "teardown code. ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1, 2]) def test_torch_linear(ray_start_4_cpus, num_workers): def", "model definition must be passed in. 
with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class", "epochs assert result[-1][\"loss\"] < result[0][\"loss\"] num_workers = num_workers epochs =", "def __call__(self, x): return self.pred.predict(x, dtype=torch.float) predictions = predict_dataset.map_batches( TorchScorer,", "[1, 2]) def test_torch_linear(ray_start_4_cpus, num_workers): def train_func(config): result = linear_train_func(config)", "batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() == 3 if __name__ ==", "compute=\"actors\" ) assert predictions.count() == 3 if __name__ == \"__main__\":", ") assert predictions.count() == 3 if __name__ == \"__main__\": import", "= ray.init(num_cpus=4) yield address_info # The code after the yield", "trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() predict_dataset", "torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config = {\"num_workers\": 2} trainer = TorchTrainer(", "ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func @pytest.fixture def ray_start_4_cpus(): address_info =", "2} trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit()", "assert result[-1][\"loss\"] < result[0][\"loss\"] num_workers = num_workers epochs = 3", "len(result) == epochs assert result[-1][\"loss\"] < result[0][\"loss\"] num_workers = num_workers", "with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def __init__(self): self.pred = TorchPredictor.from_checkpoint(", "import TorchTrainer from ray import train from ray.ml.examples.pytorch.torch_linear_example import train_func", "num_workers} config = {\"lr\": 1e-2, \"hidden_size\": 1, \"batch_size\": 4, \"epochs\":", "state dict, a model definition must be passed in. with", "result = trainer.fit() predict_dataset = ray.data.range(3) class TorchScorer: def __init__(self):", "TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, ) trainer.fit() def test_torch_e2e(ray_start_4_cpus): def train_func():", "= linear_train_func(config) assert len(result) == epochs assert result[-1][\"loss\"] < result[0][\"loss\"]", "If loading from a state dict, a model definition must", "= TorchTrainer( train_loop_per_worker=train_func, train_loop_config=config, scaling_config=scaling_config, ) trainer.fit() def test_torch_e2e(ray_start_4_cpus): def", "ray import train from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func @pytest.fixture", "def ray_start_4_cpus(): address_info = ray.init(num_cpus=4) yield address_info # The code", "predictions.count() == 3 def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1,", "ray from ray.ml.predictors.integrations.torch import TorchPredictor from ray.ml.train.integrations.torch import TorchTrainer from", "= TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config ) result = trainer.fit() # If", "run as teardown code. 
ray.shutdown() @pytest.mark.parametrize(\"num_workers\", [1, 2]) def test_torch_linear(ray_start_4_cpus,", "import pytest import torch import ray from ray.ml.predictors.integrations.torch import TorchPredictor", "pytest import torch import ray from ray.ml.predictors.integrations.torch import TorchPredictor from", "torch import ray from ray.ml.predictors.integrations.torch import TorchPredictor from ray.ml.train.integrations.torch import", "import torch import ray from ray.ml.predictors.integrations.torch import TorchPredictor from ray.ml.train.integrations.torch", "== 3 def test_torch_e2e_state_dict(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1).state_dict()", "epochs = 3 scaling_config = {\"num_workers\": num_workers} config = {\"lr\":", "test_torch_e2e(ray_start_4_cpus): def train_func(): model = torch.nn.Linear(1, 1) train.save_checkpoint(model=model) scaling_config =", "a model definition must be passed in. with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint)", "in. with pytest.raises(ValueError): TorchPredictor.from_checkpoint(result.checkpoint) class TorchScorer: def __init__(self): self.pred =", "scaling_config = {\"num_workers\": 2} trainer = TorchTrainer( train_loop_per_worker=train_func, scaling_config=scaling_config )", "__init__(self): self.pred = TorchPredictor.from_checkpoint(result.checkpoint) def __call__(self, x): return self.pred.predict(x, dtype=torch.float)", "== 3 if __name__ == \"__main__\": import pytest import sys", "= predict_dataset.map_batches( TorchScorer, batch_format=\"pandas\", compute=\"actors\" ) assert predictions.count() == 3", "from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func @pytest.fixture def ray_start_4_cpus(): address_info" ]
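This second list shingles a pytest file exercising Ray's ray.ml TorchTrainer and TorchPredictor; the <reponame>mgelbart/ray<gh_stars>10-100 marker is a dataset token identifying the source repository, not code. Because the shingles overlap, the file can be stitched back together; an abridged reassembly follows (the test_torch_e2e_state_dict variant and the __main__ pytest runner seen in the fragments are omitted). Treat it as a reconstruction against whatever Ray version the fragments were taken from, not as current Ray API.

import pytest
import torch

import ray
from ray import train
from ray.ml.predictors.integrations.torch import TorchPredictor
from ray.ml.train.integrations.torch import TorchTrainer
from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func


@pytest.fixture
def ray_start_4_cpus():
    address_info = ray.init(num_cpus=4)
    yield address_info
    # The code after the yield will run as teardown code.
    ray.shutdown()


@pytest.mark.parametrize("num_workers", [1, 2])
def test_torch_linear(ray_start_4_cpus, num_workers):
    # Each worker runs the example linear-regression loop and checks that the loss drops.
    def train_func(config):
        result = linear_train_func(config)
        assert len(result) == epochs
        assert result[-1]["loss"] < result[0]["loss"]

    epochs = 3
    scaling_config = {"num_workers": num_workers}
    config = {"lr": 1e-2, "hidden_size": 1, "batch_size": 4, "epochs": epochs}
    trainer = TorchTrainer(
        train_loop_per_worker=train_func,
        train_loop_config=config,
        scaling_config=scaling_config,
    )
    trainer.fit()


def test_torch_e2e(ray_start_4_cpus):
    # Save a whole nn.Module in the checkpoint, then score a small dataset with it.
    def train_func():
        model = torch.nn.Linear(1, 1)
        train.save_checkpoint(model=model)

    scaling_config = {"num_workers": 2}
    trainer = TorchTrainer(train_loop_per_worker=train_func, scaling_config=scaling_config)
    result = trainer.fit()

    predict_dataset = ray.data.range(3)

    class TorchScorer:
        def __init__(self):
            self.pred = TorchPredictor.from_checkpoint(result.checkpoint)

        def __call__(self, x):
            return self.pred.predict(x, dtype=torch.float)

    predictions = predict_dataset.map_batches(
        TorchScorer, batch_format="pandas", compute="actors"
    )
    assert predictions.count() == 3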
[ "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "**kwargs): # type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the", "Specifies the rate at which IxNetwork sends learn frames to", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)", "specified Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation", "# Copyright 1997 - 2020 by IXIA Keysight # #", "(*Any, **Any) -> None \"\"\"Executes the applyAsync operation on the", "int, bool, bool, int, int) -> LearnFrames \"\"\"Updates learnFrames resource", "learn frames to the DUT. - LearnSendMacOnly (bool): Sends learning", "permission notice shall be included in # all copies or", "specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to", "value) @property def LearnFrameSize(self): # type: () -> int \"\"\"", "portions of the Software. # # THE SOFTWARE IS PROVIDED", "the server. Args ---- - FastPathEnable (bool): If true, enables", "\"\"\"Updates learnFrames resource on the server. Args ---- - FastPathEnable", "learning frames from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies", "operation on the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool)", "-> bool \"\"\" Returns ------- - bool: Sends router solicitation", "(number): Specifies the number of learning frames that IxNetwork sends", "LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type:", "| onTrial): Allows to choose how frequently IxNetwork sends learning", "Allows to choose how frequently IxNetwork sends learning frames during", "of learn frames that IxNetwork sends through fast path. \"\"\"", "| oncePerTest | onTrial): Allows to choose how frequently IxNetwork", "a PDF report for the last succesfull test run. generateReport(async_operation=bool)string", "of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: ()", "resource which will be retrieved from the server every time", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute", "self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any,", "included in # all copies or substantial portions of the", "Connection class will block until the operation is complete. start(InputParameters=string,", "IxNetwork pauses before sending all the Raises ------ - ServerError:", "Test. 
stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the", "before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self,", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return", "calls made through the Connection class will block until the", "value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def", "- Returns str: This method is asynchronous and has no", "learning frames in the fast path. - FastPathNumFrames (number): Specifies", "\"\"\" Returns ------- - bool: If true, enables fast path", "'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames',", "def LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value)", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "size of the learning frames. - LearnFrequency (str(never | onBinaryIteration", "its execution to finish. The IxNetwork model allows for multiple", "LearnRate(self): # type: () -> int \"\"\" Returns ------- -", "__init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): #", "# type: () -> str \"\"\" Returns ------- - str(never", "asynchronously. Any subsequent rest api calls made through the Connection", "onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows to choose", "from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self,", "is complete. - Returns bool: Raises ------ - NotFoundError: The", "complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments", "Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the", "int \"\"\" Returns ------- - number: Specifies the rate at", "a copy # of this software and associated documentation files", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "notice shall be included in # all copies or substantial", "of the learning frames in the fast path. - FastPathNumFrames", "value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def", "and this permission notice shall be included in # all", "(bool): Sends learning frames to MAC address only. - LearnSendRouterSolicitation", "2020 by IXIA Keysight # # Permission is hereby granted,", "PDF report for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------", "to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to", "and to permit persons to whom the # Software is", "frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self,", "learning frames during the test. - LearnNumFrames (number): Specifies the", "until the operation is complete. 
start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "Returns ------- - number: Specifies the length of time in", "@FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'],", "which IxNetwork sends learn frames to the DUT. \"\"\" return", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int)", "(the \"Software\"), # to deal in the Software without restriction,", "The server has encountered an uncategorized error condition \"\"\" payload", "'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration',", "def LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns -------", "self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: () -> int", "required learnFrames resource which will be retrieved from the server", "applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False):", "the rights to use, copy, modify, merge, publish, distribute, sublicense,", "which will be retrieved from the server every time the", "the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ -", "exist on the server - ServerError: The server has encountered", "the Software, and to permit persons to whom the #", "run operation on the server. Starts the specified Quick Test", "is hereby granted, free of charge, to any person obtaining", "payload['Arg%s' % (i + 2)] = args[i] for item in", "\"\"\"Executes the run operation on the server. Starts the specified", "of learning frames that IxNetwork sends for each address. -", "associated documentation files (the \"Software\"), # to deal in the", "result of the test. Raises ------ - NotFoundError: The requested", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) ->", "applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute the operation", "person obtaining a copy # of this software and associated", "OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import", "None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () ->", "- LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms", "type: (*Any, **Any) -> None \"\"\"Executes the stop operation on", "stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the operation", "size of the learning frames in the fast path. \"\"\"", "execution of the specified Quick Test to be completed. waitForTest(async_operation=bool)list", "['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False):", "the server. 
Generate a PDF report for the last succesfull", "'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames,", "type: (*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration operation on", "-> int \"\"\" Returns ------- - number: Specifies the number", "while python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True", "@property def LearnFrameSize(self): # type: () -> int \"\"\" Returns", "Raises ------ - ServerError: The server has encountered an uncategorized", "Returns list(str): This method is synchronous and returns the result", "until the operation is complete. - Returns bool: Raises ------", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "int, int, int, int, str, int, int, bool, bool, int,", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self):", "self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: () -> int", "on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to", "return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type:", "\"\"\" Returns ------- - bool: Sends learning frames to MAC", "None] \"\"\"Executes the waitForTest operation on the server. Waits for", "learn frames that IxNetwork sends through fast path. - FastPathRate", "self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type: (*Any,", "# Permission is hereby granted, free of charge, to any", "a required learnFrames resource which will be retrieved from the", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "IxNetwork model allows for multiple method Signatures with the same", "that IxNetwork sends for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter", "in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self,", "all the Raises ------ - ServerError: The server has encountered", "the server - ServerError: The server has encountered an uncategorized", "learning frames from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter", "sends learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter", "self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None", "method Signatures with the same name while python does not.", "is synchronous and returns the result of the test. run(InputParameters=string,", "item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): #", "to the DUT. - LearnSendMacOnly (bool): Sends learning frames to", "-> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: ()", "not. 
run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the", "return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) ->", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] =", "the Connection class will block until the operation is complete.", "ms that IxNetwork pauses before sending all the \"\"\" return", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type:", "def LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value)", "**Any) -> Union[str, None] \"\"\"Executes the generateReport operation on the", "- FastPathNumFrames (number): Specifies the number of learn frames that", "kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self,", "response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) ->", "'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op)", "Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True", "which IxNetwork sends learn frames to the DUT. - LearnSendMacOnly", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): #", "learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1]", "return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) ->", "fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): #", "---- - FastPathEnable (bool): If true, enables fast path transmit.", "Any, Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload,", "at which IxNetwork sends learn frames to the DUT. -", "- LearnFrameSize (number): Specifies the size of the learning frames.", "@LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'],", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "name while python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False):", "\"Arg1\": self.href } for i in range(len(args)): payload['Arg%s' % (i", "payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args,", "def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value)", "} for i in range(len(args)): payload['Arg%s' % (i + 2)]", "the operation is complete. 
# uhd_restpy/testplatform/sessions/ixnetwork/quicktest/learnframes_58e01d83db5d99bcabff902f5cf6ec51.py
# MIT LICENSE - Copyright 1997 - 2020 by IXIA Keysight
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union


class LearnFrames(Base):
    """The learning frames that IxNetwork sends during the test.

    The LearnFrames class encapsulates a required learnFrames resource which
    will be retrieved from the server every time the property is accessed.
    """

    __slots__ = ()
    _SDM_NAME = 'learnFrames'
    _SDM_ATT_MAP = {
        'FastPathEnable': 'fastPathEnable',
        'FastPathLearnFrameSize': 'fastPathLearnFrameSize',
        'FastPathNumFrames': 'fastPathNumFrames',
        'FastPathRate': 'fastPathRate',
        'LearnFrameSize': 'learnFrameSize',
        'LearnFrequency': 'learnFrequency',
        'LearnNumFrames': 'learnNumFrames',
        'LearnRate': 'learnRate',
        'LearnSendMacOnly': 'learnSendMacOnly',
        'LearnSendRouterSolicitation': 'learnSendRouterSolicitation',
        'LearnWaitTime': 'learnWaitTime',
        'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit',
    }
    _SDM_ENUM_MAP = {
        'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'],
    }

    def __init__(self, parent, list_op=False):
        super(LearnFrames, self).__init__(parent, list_op)

    @property
    def FastPathEnable(self):
        # type: () -> bool
        """
        Returns
        -------
        - bool: If true, enables fast path transmit.
        """
        return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable'])

    @FastPathEnable.setter
    def FastPathEnable(self, value):
        # type: (bool) -> None
        self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value)

    # The remaining attributes follow the same property getter/setter pattern around
    # _get_attribute / _set_attribute, keyed through _SDM_ATT_MAP:
    #   FastPathLearnFrameSize (number): Specifies the size of the learning frames in the fast path.
    #   FastPathNumFrames (number): Specifies the number of learn frames that IxNetwork sends through fast path.
    #   FastPathRate (number): Specifies the rate at which IxNetwork sends learn frames through fast path.
    #   LearnFrameSize (number): Specifies the size of the learning frames.
    #   LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)):
    #       Allows to choose how frequently IxNetwork sends learning frames during the test.
    #   LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends for each address.
    #   LearnRate (number): Specifies the rate at which IxNetwork sends learn frames to the DUT.
    #   LearnSendMacOnly (bool): Sends learning frames to MAC address only.
    #   LearnSendRouterSolicitation (bool): Sends router solicitation messages.
    #   LearnWaitTime (number): Specifies the length of time in ms that IxNetwork pauses before sending
    #       all the learning frames from all the ports.
    #   LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms that IxNetwork pauses
    #       before sending all the learning frames (description truncated in the generated source).

    def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None,
               FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None,
               LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None,
               LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None):
        # type: (bool, int, int, int, int, str, int, int, bool, bool, int, int) -> LearnFrames
        """Updates learnFrames resource on the server.

        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))

    def Apply(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        """Executes the apply operation on the server.

        Applies the specified Quick Test.

        apply(async_operation=bool)
        ---------------------------
        - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent
          rest api calls made through the Connection class will block until the operation is complete.

        Raises
        ------
        - NotFoundError: The requested resource does not exist on the server
        - ServerError: The server has encountered an uncategorized error condition
        """
        payload = {"Arg1": self.href}
        for i in range(len(args)):
            payload['Arg%s' % (i + 2)] = args[i]
        for item in kwargs.items():
            payload[item[0]] = item[1]
        return self._execute('apply', payload=payload, response_object=None)

    # The other operations are generated with the same payload-building body, differing only in the
    # operation name passed to self._execute() and in their documented return value. The IxNetwork
    # model allows multiple signatures with the same name while Python does not, so InputParameters
    # is passed positionally or by keyword where supported:
    #   ApplyAsync()                              - applyAsync: applies the specified Quick Test.
    #   ApplyAsyncResult() -> bool                - applyAsyncResult: outcome of the asynchronous apply.
    #   ApplyITWizardConfiguration()              - applyITWizardConfiguration: applies the specified Quick Test.
    #   GenerateReport() -> str                   - generateReport: generates a PDF report for the last
    #                                               successful test run.
    #   Run(InputParameters=string) -> list(str)  - run: starts the specified Quick Test and waits for its
    #                                               execution to finish; synchronous, returns the test result.
    #   Start(InputParameters=string)             - start: starts the specified Quick Test.
    #   Stop()                                    - stop: stops the currently running Quick Test.
    #   WaitForTest() -> list(str)                - waitForTest: waits for the execution of the specified
    #                                               Quick Test to be completed; synchronous, returns the result.
# # THE SOFTWARE IS", "\"\"\" Returns ------- - number: Specifies the length of time", "item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): #", "which IxNetwork sends learn frames through fast path. - LearnFrameSize", "apply operation on the server. Applies the specified Quick Test.", "and returns the result of the test. run(InputParameters=string, async_operation=bool)list -----------------------------------------------------", "merge, publish, distribute, sublicense, # and/or sell copies of the", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): #", "def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None,", "IXIA Keysight # # Permission is hereby granted, free of", "LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns ------- -", "update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None,", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('waitForTest', payload=payload, response_object=None)", "any person obtaining a copy # of this software and", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "LearnRate (number): Specifies the rate at which IxNetwork sends learn", "sends learn frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter", "Starts the specified Quick Test. The IxNetwork model allows for", "number: Specifies the size of the learning frames. \"\"\" return", "IxNetwork pauses before sending all the learning frames from all", "# type: () -> bool \"\"\" Returns ------- - bool:", "type: () -> bool \"\"\" Returns ------- - bool: If", "permit persons to whom the # Software is furnished to", "the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): #", "copyright notice and this permission notice shall be included in", "__slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable':", "type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): #", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload,", "succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to", "FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property", "which IxNetwork sends learn frames through fast path. \"\"\" return", "server. Args ---- - FastPathEnable (bool): If true, enables fast", "def FastPathLearnFrameSize(self): # type: () -> int \"\"\" Returns -------", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self):", "value) @property def LearnSendMacOnly(self): # type: () -> bool \"\"\"", "def LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value)", "learning frames that IxNetwork sends during the test. 
The LearnFrames", "int, int, str, int, int, bool, bool, int, int) ->", "response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) ->", "time in ms that IxNetwork pauses before sending all the", "limitation # the rights to use, copy, modify, merge, publish,", "- str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial):", "of the learning frames in the fast path. \"\"\" return", "the generateReport operation on the server. Generate a PDF report", "None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () ->", "oncePerFramesize | oncePerTest | onTrial)): Allows to choose how frequently", "Returns ------- - number: Specifies the number of learning frames", "bool: Raises ------ - NotFoundError: The requested resource does not", "def LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value)", "payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args,", "True to execute the operation asynchronously. Any subsequent rest api", "frames that IxNetwork sends through fast path. - FastPathRate (number):", "= item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs):", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "Quick Test. The IxNetwork model allows for multiple method Signatures", "() _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize':", "is synchronous and returns the result of the test. Raises", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS", "will block until the operation is complete. Raises ------ -", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None,", "Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self,", "value) @property def LearnNumFrames(self): # type: () -> int \"\"\"", "LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int, str,", "**Any) -> None \"\"\"Executes the stop operation on the server.", "type: (bool, int, int, int, int, str, int, int, bool,", "sending all the learning frames from all the ports. -", "If true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value):", "payload=payload, response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any)", "operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True", "number: Specifies the number of learn frames that IxNetwork sends", "resource does not exist on the server - ServerError: The", "-> str \"\"\" Returns ------- - str(never | onBinaryIteration |", "\"\"\"Executes the apply operation on the server. Applies the specified", "no return value. 
Raises ------ - NotFoundError: The requested resource", "LearnNumFrames(self): # type: () -> int \"\"\" Returns ------- -", "FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None,", "during the test. - LearnNumFrames (number): Specifies the number of", "asynchronous and has no return value. Raises ------ - NotFoundError:", "self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int) -> None", "'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent,", "value) @property def FastPathNumFrames(self): # type: () -> int \"\"\"", "\"\"\"Executes the applyITWizardConfiguration operation on the server. Applies the specified", "learning frames to MAC address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload,", "------------------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "size of the learning frames in the fast path. -", "{ 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize':", "response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any) ->", "the same name while python does not. run(async_operation=bool)list ----------------------------- -", "Files from typing import List, Any, Union class LearnFrames(Base): \"\"\"The", "return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): # type:", "size of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def", "operation is complete. - Returns list(str): This method is synchronous", "THE SOFTWARE. 
from uhd_restpy.base import Base from uhd_restpy.files import Files", "- number: Specifies the length of time in ms that", "return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type:", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type:", "The IxNetwork model allows for multiple method Signatures with the", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int) -> None", "-> int \"\"\" Returns ------- - number: Specifies the size", "*args, **kwargs): # type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes", "type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest operation", "= item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs):", "Specifies the size of the learning frames in the fast", "------------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously.", "type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): #", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('waitForTest',", "response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) ->", "return value. Raises ------ - NotFoundError: The requested resource does", "for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation", "how frequently IxNetwork sends learning frames during the test. -", "The requested resource does not exist on the server -", "bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def", "@LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'],", "LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies", "------- - number: Specifies the number of learning frames that", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload,", "multiple method Signatures with the same name while python does", "\"\"\"Executes the waitForTest operation on the server. Waits for the", "kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self,", "= 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames':", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "The server has encountered an uncategorized error condition \"\"\" return", "copy # of this software and associated documentation files (the", "Permission is hereby granted, free of charge, to any person", "number: Specifies the rate at which IxNetwork sends learn frames", "None \"\"\"Executes the start operation on the server. Starts the", "-> None \"\"\"Executes the stop operation on the server. 
Stops", "self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () -> int", "LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def", "is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: ()", "None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None,", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None)", "IxNetwork sends learn frames to the DUT. - LearnSendMacOnly (bool):", "sublicense, # and/or sell copies of the Software, and to", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "of the test. Raises ------ - NotFoundError: The requested resource", "payload=payload, response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any)", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self):", "= item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs):", "block until the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- -", "execute the operation asynchronously. Any subsequent rest api calls made", "server has encountered an uncategorized error condition \"\"\" payload =", "import Base from uhd_restpy.files import Files from typing import List,", "block until the operation is complete. Raises ------ - NotFoundError:", "int \"\"\" Returns ------- - number: Specifies the number of", "the server. Applies the specified Quick Test. apply(async_operation=bool) --------------------------- -", "**Any) -> None \"\"\"Executes the start operation on the server.", "\"\"\"Executes the stop operation on the server. Stops the currently", "# type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def", "arguments of the test. - async_operation (bool=False): True to execute", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool)", "self.href } for i in range(len(args)): payload['Arg%s' % (i +", "the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): #", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport',", "the execution of the specified Quick Test to be completed.", "(str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type:", "# type: (bool, int, int, int, int, str, int, int,", "test. 
run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments", "Returns ------- - number: Specifies the number of learn frames", "router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value):", "frequently IxNetwork sends learning frames during the test. - LearnNumFrames", "- Returns bool: Raises ------ - NotFoundError: The requested resource", "learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): #", "hereby granted, free of charge, to any person obtaining a", "uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args,", "run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments of", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "------- - bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'])", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None)", "the size of the learning frames. - LearnFrequency (str(never |", "# all copies or substantial portions of the Software. #", "start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments of", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: ()", "server. Generate a PDF report for the last succesfull test", "'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP =", "LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property", "type: () -> bool \"\"\" Returns ------- - bool: Sends", "learn frames that IxNetwork sends through fast path. \"\"\" return", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self,", "Apply(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes", "all copies or substantial portions of the Software. # #", "USE OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base", "- FastPathEnable (bool): If true, enables fast path transmit. -", "fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value):", "Software is furnished to do so, subject to the following", "the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of time", "the learning frames in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'])", "i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]", "FastPathNumFrames (number): Specifies the number of learn frames that IxNetwork", "frames to MAC address only. 
- LearnSendRouterSolicitation (bool): Sends router", "return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type:", "(*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult operation on", "to do so, subject to the following conditions: # #", "frequently IxNetwork sends learning frames during the test. \"\"\" return", "accessed. \"\"\" __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP =", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "frames that IxNetwork sends for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames'])", "IxNetwork sends for each address. - LearnRate (number): Specifies the", "applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute the operation", "server. Starts the specified Quick Test and waits for its", "@property def FastPathEnable(self): # type: () -> bool \"\"\" Returns", "to permit persons to whom the # Software is furnished", "for i in range(len(args)): payload['Arg%s' % (i + 2)] =", "# type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest", "fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): #", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "until the operation is complete. - Returns str: This method", "WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None]", "type: () -> str \"\"\" Returns ------- - str(never |", "test. - LearnNumFrames (number): Specifies the number of learning frames", "server has encountered an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP,", "Specifies the number of learning frames that IxNetwork sends for", "transmit. - FastPathLearnFrameSize (number): Specifies the size of the learning", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int)", "'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate':", "- FastPathRate (number): Specifies the rate at which IxNetwork sends", "- bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter", "operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True", "ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type:", "the test. Raises ------ - NotFoundError: The requested resource does", "sends through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self,", "finish. The IxNetwork model allows for multiple method Signatures with", "None \"\"\"Executes the applyITWizardConfiguration operation on the server. 
Applies the", "'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames',", "----------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () ->", "'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency',", "sending all the Raises ------ - ServerError: The server has", "@LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'],", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None)", "InputParameters (str): The input arguments of the test. - async_operation", "*args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type:", "IxNetwork sends through fast path. - FastPathRate (number): Specifies the", "import List, Any, Union class LearnFrames(Base): \"\"\"The learning frames that", "payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args,", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def", "'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames':", "\"\"\" Returns ------- - str(never | onBinaryIteration | oncePerFramesize |", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type:", "None] \"\"\"Executes the run operation on the server. Starts the", "learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def", "str, int, int, bool, bool, int, int) -> LearnFrames \"\"\"Updates", "the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute", "and/or sell copies of the Software, and to permit persons", "the server. Starts the specified Quick Test. The IxNetwork model", "the number of learning frames that IxNetwork sends for each", "(bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type:", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def", "from uhd_restpy.files import Files from typing import List, Any, Union", "address. - LearnRate (number): Specifies the rate at which IxNetwork", "server. Stops the currently running Quick Test. stop(async_operation=bool) -------------------------- -", "Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None]", "of learn frames that IxNetwork sends through fast path. -", "server. 
Waits for the execution of the specified Quick Test", "LICENSE # # Copyright 1997 - 2020 by IXIA Keysight", "def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value)", "@FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'],", "the result of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "\"\"\"Executes the applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- -", "the operation asynchronously. Any subsequent rest api calls made through", "LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)):", "= item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs):", "-> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: ()", "-> Union[List[str], None] \"\"\"Executes the run operation on the server.", "# type: (*Any, **Any) -> None \"\"\"Executes the start operation", "block until the operation is complete. - Returns list(str): This", "of charge, to any person obtaining a copy # of", "(i + 2)] = args[i] for item in kwargs.items(): payload[item[0]]", "to execute the operation asynchronously. Any subsequent rest api calls", "self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any,", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def", "OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import Base", "This method is synchronous and returns the result of the", "Signatures with the same name while python does not. start(async_operation=bool)", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self):", "block until the operation is complete. - Returns str: This", "Base from uhd_restpy.files import Files from typing import List, Any,", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: ()", "return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type:", "software and associated documentation files (the \"Software\"), # to deal", "def LearnWaitTime(self): # type: () -> int \"\"\" Returns -------", "item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): #", "and has no return value. Raises ------ - NotFoundError: The", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "@property def LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns", "complete. Raises ------ - NotFoundError: The requested resource does not", "during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value):", "is asynchronous and has no return value. 
Raises ------ -", "encountered an uncategorized error condition \"\"\" payload = { \"Arg1\":", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None,", "name while python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False):", "'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest',", "# type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport", "The input arguments of the test. - async_operation (bool=False): True", "solicitation messages. - LearnWaitTime (number): Specifies the length of time", "# THE SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files import", "} _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'],", "\"\"\" Returns ------- - number: Specifies the number of learning", "payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args,", "Raises ------ - NotFoundError: The requested resource does not exist", "of learning frames that IxNetwork sends for each address. \"\"\"", "return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type: (*Any,", "range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item", "that IxNetwork sends through fast path. - FastPathRate (number): Specifies", "oncePerTest | onTrial)): Allows to choose how frequently IxNetwork sends", "stop operation on the server. Stops the currently running Quick", "item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): #", "-> bool \"\"\" Returns ------- - bool: If true, enables", "item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): #", "persons to whom the # Software is furnished to do", "(*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport operation on", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "MIT LICENSE # # Copyright 1997 - 2020 by IXIA", "value) @property def FastPathRate(self): # type: () -> int \"\"\"", "of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self,", "\"\"\" Returns ------- - bool: Sends router solicitation messages. \"\"\"", "condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): #", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run',", "def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value)", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "generateReport operation on the server. Generate a PDF report for", "true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def", "sends learning frames during the test. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter", "@FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'],", "payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any)", "(bool=False): True to execute the operation asynchronously. Any subsequent rest", "will block until the operation is complete. - Returns list(str):", "the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation", "- number: Specifies the rate at which IxNetwork sends learn", "self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: () -> int", "and waits for its execution to finish. The IxNetwork model", "with the same name while python does not. run(async_operation=bool)list -----------------------------", "learning frames to MAC address only. - LearnSendRouterSolicitation (bool): Sends", "LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns ------- -", "do so, subject to the following conditions: # # The", "------ - ServerError: The server has encountered an uncategorized error", "with the same name while python does not. start(async_operation=bool) ---------------------------", "LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property", "bool \"\"\" Returns ------- - bool: If true, enables fast", "shall be included in # all copies or substantial portions", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the operation", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "of the learning frames. - LearnFrequency (str(never | onBinaryIteration |", "the Software without restriction, including without limitation # the rights", "'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never',", "= { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate',", "through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value):", "def Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str],", "payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args,", "Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False):", "'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime':", "does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute", "% (i + 2)] = args[i] for item in kwargs.items():", "Sends learning frames to MAC address only. - LearnSendRouterSolicitation (bool):", "\"\"\"Executes the start operation on the server. Starts the specified", "Returns bool: Raises ------ - NotFoundError: The requested resource does", "frames in the fast path. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def", "to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value):", "server - ServerError: The server has encountered an uncategorized error", "- number: Specifies the size of the learning frames in", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): #", "int) -> LearnFrames \"\"\"Updates learnFrames resource on the server. Args", "frames in the fast path. - FastPathNumFrames (number): Specifies the", "\"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type:", "Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False):", "complete. - Returns str: This method is asynchronous and has", "fast path transmit. - FastPathLearnFrameSize (number): Specifies the size of", "FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property", "IxNetwork sends learn frames through fast path. - LearnFrameSize (number):", "-> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: ()", "is complete. - Returns str: This method is asynchronous and", "self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) -> None", "type: (*Any, **Any) -> None \"\"\"Executes the applyAsync operation on", "FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): #", "Returns ------- - number: Specifies the rate at which IxNetwork", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): #", "error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs):", "response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) ->", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "from uhd_restpy.base import Base from uhd_restpy.files import Files from typing", "def LearnFrequency(self): # type: () -> str \"\"\" Returns -------", "(*Any, **Any) -> None \"\"\"Executes the apply operation on the", "the specified Quick Test. The IxNetwork model allows for multiple", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "to choose how frequently IxNetwork sends learning frames during the", "the size of the learning frames in the fast path.", "on the server - ServerError: The server has encountered an", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: ()", "the same name while python does not. 
start(async_operation=bool) --------------------------- -", "{ \"Arg1\": self.href } for i in range(len(args)): payload['Arg%s' %", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def", "(number): Specifies the number of learn frames that IxNetwork sends", "onTrial)): Allows to choose how frequently IxNetwork sends learning frames", "copies or substantial portions of the Software. # # THE", "_SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], }", "The LearnFrames class encapsulates a required learnFrames resource which will", "IxNetwork sends during the test. The LearnFrames class encapsulates a", "the server. Starts the specified Quick Test and waits for", "the server. Stops the currently running Quick Test. stop(async_operation=bool) --------------------------", "() -> bool \"\"\" Returns ------- - bool: If true,", "value) @property def LearnRate(self): # type: () -> int \"\"\"", "learning frames that IxNetwork sends for each address. \"\"\" return", "IxNetwork sends learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency'])", "Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends during", "LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int,", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self,", "# MIT LICENSE # # Copyright 1997 - 2020 by", "through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value):", "waits for its execution to finish. The IxNetwork model allows", "model allows for multiple method Signatures with the same name", "(number): Specifies the size of the learning frames. - LearnFrequency", "# type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self):", "\"\"\"Executes the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ -", "**Any) -> None \"\"\"Executes the apply operation on the server.", "Returns str: This method is asynchronous and has no return", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "specified Quick Test and waits for its execution to finish.", "\"\"\" payload = { \"Arg1\": self.href } for i in", "self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () -> bool", "self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None", "value. Raises ------ - NotFoundError: The requested resource does not", "is complete. - Returns list(str): This method is synchronous and", "LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property", "| onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows to", "resource on the server. 
Args ---- - FastPathEnable (bool): If", "self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () -> bool", "the stop operation on the server. Stops the currently running", "the fast path. - FastPathNumFrames (number): Specifies the number of", "'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = {", "@LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'],", "last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True", "response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any) ->", "to use, copy, modify, merge, publish, distribute, sublicense, # and/or", "ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) -> Union[bool, None]", "type: (*Any, **Any) -> None \"\"\"Executes the apply operation on", "LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property", "obtaining a copy # of this software and associated documentation", "-------------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit',", "async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "int \"\"\" Returns ------- - number: Specifies the length of", "The above copyright notice and this permission notice shall be", "in the Software without restriction, including without limitation # the", "NotFoundError: The requested resource does not exist on the server", "def FastPathNumFrames(self): # type: () -> int \"\"\" Returns -------", "without limitation # the rights to use, copy, modify, merge,", "learning frames that IxNetwork sends for each address. - LearnRate", "**kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the stop", "def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value)", "the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str):", "(bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies the", "start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation", "None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: () ->", "sending all the learning frames from all the ports. \"\"\"", "= item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs):" ]
[ "rest_framework import serializers from core import models class AssetSerializer(serializers.ModelSerializer): class", "import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields", "import serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta:", "from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model =", "models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields =", "from rest_framework import serializers from core import models class AssetSerializer(serializers.ModelSerializer):", "class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields = '__all__'", "serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model", "core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset" ]
[ "\"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1.", "= 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output =", "\"\"\" output = \"\"\" {a(2), c(2), d(2,2,1), okay(2), p(1)} \"\"\"", "ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2),", "p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2), c(2),", "= \"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} =", "okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1.", "c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\"", "#count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output", "c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):-", "p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X),", "input = \"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)}", "#count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2), c(2), d(2,2,1),", "= 1. \"\"\" output = \"\"\" {a(2), c(2), d(2,2,1), okay(2),", "d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} =", "1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\"", "<filename>tests/wasp1/AllAnswerSets/aggregates_count_boundvariables_1.test.py input = \"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X),", "a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)}", "1. \"\"\" output = \"\"\" {a(2), c(2), d(2,2,1), okay(2), p(1)}" ]
[ "str(salt) + \"&sign=\" + sign ) async with ClientSession() as", "langdetect import detect, detect_langs from aiohttp import ClientSession from nonebot", "def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser =", "= \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to", "= \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser", "session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] =", "coding=utf-8 import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"},", "ShitAns = await resp.json() try: ans = [i[\"dst\"] for i", "session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession): arg =", "session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if session.state[\"fr\"] ==", "import get_bot from nonebot.argparse import ArgumentParser import time import hmac", "session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\",", "get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import", "toLang + \"&salt=\" + str(salt) + \"&sign=\" + sign )", "+ myurl) as resp: if resp.status != 200: pass ShitAns", "\"zh\" else: session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"]", "+ sign ) async with ClientSession() as sess: async with", "myurl) as resp: if resp.status != 200: pass ShitAns =", "else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if argv.fr", "resp.status != 200: pass ShitAns = await resp.json() try: ans", "== \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else:", "arg = \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"]", "i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"])", "sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status", "= session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"]", "default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg", "+ str(salt) + \"&sign=\" + sign ) async with ClientSession()", "q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl =", "_(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session)", "65536) sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey", "session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else argv.fr", "= \"zh\" if argv.to == \"no\": if session.state[\"fr\"] == \"zh\":", "from nonebot import CommandSession, on_command from langdetect import detect, detect_langs", "== \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\"", "sys import hashlib import binascii import urllib bot = get_bot()", "\"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else", "import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def", "async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl)", 
"<filename>Pzzzzz/plugins/wm.py from nonebot import CommandSession, on_command from langdetect import detect,", "if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to ==", "hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async", "@wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run:", "if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] =", "toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign", "\" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg)", "@on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\")", "hmac import random, sys import hashlib import binascii import urllib", "in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\"", "= [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except:", "ans) @wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if", "argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to", "+ \"&to=\" + toLang + \"&salt=\" + str(salt) + \"&sign=\"", "time import hmac import random, sys import hashlib import binascii", "bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import", "random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\",", "async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser", "wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"]", ") async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" +", "parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg =", "arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\",", "urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" + toLang +", "import CommandSession, on_command from langdetect import detect, detect_langs from aiohttp", "myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] #", "binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8", "CommandSession, on_command from langdetect import detect, detect_langs from aiohttp import", "argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\"", "= ( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" +", "ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans)", "type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\")", "\"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if session.state[\"fr\"]", "arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr ==", "as resp: if resp.status != 200: pass ShitAns = await", "\"no\" else argv.fr if session.state[\"fr\"][:2] == 
\"zh\": session.state[\"fr\"] = \"zh\"", "else: session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"] =", "def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q =", "\"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\"", "argv.to if argv.fr == \"no\": session.state[\"fr\"] = \"auto\" session.state[\"token\"] =", "\"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"]", "if argv.fr == \"no\": session.state[\"fr\"] = \"auto\" session.state[\"token\"] = arg", "async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status !=", "session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种 salt =", "\".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if", "session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI", "== \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] =", "from nonebot import get_bot from nonebot.argparse import ArgumentParser import time", "\"zh\" if argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"]", "+ \"&sign=\" + sign ) async with ClientSession() as sess:", "str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl", "= session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign =", "ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession): arg", "salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q +", "== \"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if", "nonebot import CommandSession, on_command from langdetect import detect, detect_langs from", "\"&sign=\" + sign ) async with ClientSession() as sess: async", "import hashlib import binascii import urllib bot = get_bot() #", "+ bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang", "\"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" +", "session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang =", "on_command from langdetect import detect, detect_langs from aiohttp import ClientSession", "+ \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\"", "only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\"", "# coding=utf-8 import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\",", "= \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种", "as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if", "random, sys import hashlib import binascii import urllib bot =", "prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"]", "ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" +", "myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\"", "session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session:", "= await resp.json() try: ans = [i[\"dst\"] for i in", "default=\"no\") 
parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv", "detect_langs from aiohttp import ClientSession from nonebot import get_bot from", "except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def", "if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr", "= bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign =", "= random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt)", "if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\",", "await resp.json() try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]]", "else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if", "import ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser", "nonebot import get_bot from nonebot.argparse import ArgumentParser import time import", "import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession):", "( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q)", "= get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib", "\"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang", "import detect, detect_langs from aiohttp import ClientSession from nonebot import", "for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" +", "pass ShitAns = await resp.json() try: ans = [i[\"dst\"] for", "import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> #", "urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib", "\"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl =", "\"&from=\" + fromLang + \"&to=\" + toLang + \"&salt=\" +", "if argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] =", "if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\":", "import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False)", "session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if argv.fr ==", "from nonebot.argparse import ArgumentParser import time import hmac import random,", "\"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"]", "+ urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" + toLang", "import ArgumentParser import time import hmac import random, sys import", "get_bot from nonebot.argparse import ArgumentParser import time import hmac import", "+ \"&salt=\" + str(salt) + \"&sign=\" + sign ) async", "CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\",", "bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\"", "= \"zh\" else: session.state[\"to\"] = argv.to if argv.fr == \"no\":", "= ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\")", "session.state[\"fr\"] == \"zh\": 
session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\"", "\"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async", "argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"]", "+ bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl +", "aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse import", "import hmac import random, sys import hashlib import binascii import", "import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import", "= parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg ==", "hashlib import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL>", "ArgumentParser import time import hmac import random, sys import hashlib", "parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str,", "fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种", "= session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种 salt", "# 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI +", "bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest()", "async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q", "session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] #", "[i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\"", "parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv =", "aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl", "type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \"))", "myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) +", "nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if", "default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token)", "session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\")", "sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status != 200: pass", "session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\":", "parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str,", "+ ans) @wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip()", "!= 200: pass ShitAns = await resp.json() try: ans =", "+ q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl", "+ \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\"", "+ ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession):", "CommandSession): session.get(\"token\", 
prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang", "+ toLang + \"&salt=\" + str(salt) + \"&sign=\" + sign", "ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp:", "= argv.to if argv.fr == \"no\": session.state[\"fr\"] = \"auto\" session.state[\"token\"]", "译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q", "detect, detect_langs from aiohttp import ClientSession from nonebot import get_bot", "detect(arg) if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] ==", "session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else argv.fr if", "random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt) +", "== \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else:", "sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign", "\"&salt=\" + str(salt) + \"&sign=\" + sign ) async with", "= hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI", "ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser import", "+ str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = (", "200: pass ShitAns = await resp.json() try: ans = [i[\"dst\"]", "\"&to=\" + toLang + \"&salt=\" + str(salt) + \"&sign=\" +", "with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as", "= session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str,", "argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg", "session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"] = \"auto\"", "= detect(arg) if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2]", "= \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] =", "urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session:", "+ fromLang + \"&to=\" + toLang + \"&salt=\" + str(salt)", "sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" +", "ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\",", "parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg == \"\":", "if resp.status != 200: pass ShitAns = await resp.json() try:", "\"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if", "hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI +", "\"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" +", "type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \"", "import time import hmac import random, sys import hashlib import", "fromLang + \"&to=\" + toLang + \"&salt=\" + str(salt) +", "with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status != 200:", "原文语种 toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536)", "resp: if resp.status != 200: pass ShitAns = await resp.json()", "# 原文语种 toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768,", "resp.json() try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans", "sign ) async with ClientSession() as sess: async with 
sess.get(\"https://fanyi-api.baidu.com\"", "nonebot.argparse import ArgumentParser import time import hmac import random, sys", "+ \"&from=\" + fromLang + \"&to=\" + toLang + \"&salt=\"", "# 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import random", "from langdetect import detect, detect_langs from aiohttp import ClientSession from", "ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans)", "\"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\",", "q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang =", "bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang +", "from aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse", "import random, sys import hashlib import binascii import urllib bot", "try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans =", "百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import random @on_command(\"wm\",", "\")) arg = \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\")" ]
[ "function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff,", "constraint into the model.\" assert const.optype is not None, \"You", "vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num): return", "< lp.maxvars, \"Can't add a variable: \" self.index = len(lp.vars)", "lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p),", "lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype =", "self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if", "[c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes =", "\"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't", "return ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp,", "len(lp.vars) < lp.maxvars, \"Can't add a variable: \" self.index =", "ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret", "c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p,", "update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p))", "and row buffers for calling LpSolve.\" for i, (num, var)", "= [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes", "the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp,", "c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes", "in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the model.\"", "def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\")", "+ other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some", "elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver)))", "[c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes =", "self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype,", "in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\"", "ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs)", "LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double", "__del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint into", "self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert", "(c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1)", "lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change", "maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def 
constraint(self, const): \"Add", "path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" %", "(C) 2018, <NAME> License MIT \"\"\" from ctypes import *", "add a variable: \" self.index = len(lp.vars) + 1 self.value", "wrapper. Copyright (C) 2018, <NAME> License MIT \"\"\" from ctypes", "= maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert", "lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint into the", "[c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes", "\"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) <", "POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype =", "lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype =", "self def __eq__(self, val): self.optype, self.rhs = (3, val) return", "lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct a new", "objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff)", "variable: \" self.index = len(lp.vars) + 1 self.value = None", "return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return", "retype(self, vtype): \"Change the type of the variable\" if \"bin\"", "def fill_buffers(self, colno, row): \"Fill colno and row buffers for", "= (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform", "= lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get", "+ other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear several", "RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff,", "val) return self def __ge__(self, val): self.optype, self.rhs = (2,", "c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte]", "in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self,", "= num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers", "val): self.optype, self.rhs = (3, val) return self def __ge__(self,", "% ver))) else: raise ValueError(\"Can't load LpSolve library on this", "self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp !=", "c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype =", "lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get variable", "\"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here,", "ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if", "c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't", "= \"real\" self.retype(vtype) def retype(self, vtype): \"Change the type of", "isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class", "* maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def", "0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff,", "Engine.\" def 
__init__(self, maxvars, debug=False): self.debug = debug self.maxvars =", "the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3)", "maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp", "return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\"", "ret == 1, \"Can't set objective function of model\" def", "\"Can't get variable values\" for i, var in enumerate(self.vars): var.value", "set objective function of model\" def update_variables(self): \"Update the variable", "import sys import os.path as path import platform # Import", "lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\":", "lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype =", "__rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other,", "[c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int,", "LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers =", "times\" return c def __le__(self, val): self.optype, self.rhs = (1,", "path import platform # Import the DLL ver = (\"x86\",", "lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p]", "0 or ret == 1: self.update_variables() return ret class LpVariable(object):", "lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def", "self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars)", "c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p,", "self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self,", "__init__(self, numbers, vars): self.numbers = numbers self.vars = vars self.optype", "self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self,", "== 0 or ret == 1: self.update_variables() return ret class", "c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming Engine.\"", "const.optype is not None, \"You must provide the RHS of", "const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret", "# Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32]", "assert ret == 1, \"Can't add constraint into model\" def", "= vars self.optype = None self.rhs = None def fill_buffers(self,", "[c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int,", "solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else:", "__add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars", "= (3, val) return self def __ge__(self, val): self.optype, self.rhs", "Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes", "def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add", "lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte", "for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index", "def __ge__(self, val): self.optype, self.rhs = (2, val) return self", "of 
constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p),", "(c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self,", "enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def __add__(self,", "is not None, \"You must provide the RHS of constraint\"", "(vtype == \"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index,", "\"real\" self.retype(vtype) def retype(self, vtype): \"Change the type of the", "__le__(self, val): self.optype, self.rhs = (1, val) return self def", "(3, val) return self def __ge__(self, val): self.optype, self.rhs =", "self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp,", "lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change the", "def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint", "other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars +", "__init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars", "constraint into model\" def objective(self, const): \"Set the objective function.\"", "= POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes", "= (1, val) return self def __eq__(self, val): self.optype, self.rhs", "= lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct a", "= [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte", "cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get variable values\"", "constraint.\" def __init__(self, numbers, vars): self.numbers = numbers self.vars =", "self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp)", "= self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug:", "\"Can't set objective function of model\" def update_variables(self): \"Update the", "c_double_p)) assert ret == 1, \"Can't get variable values\" for", "vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in (self.type,", "lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p]", "\"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp,", "Copyright (C) 2018, <NAME> License MIT \"\"\" from ctypes import", "def update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff,", "maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self):", "LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] =", "c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p,", "[1], self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers,", "= c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte", "lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p]", "== 1, \"Can't set objective function of model\" def update_variables(self):", "self.optype, self.rhs = (1, val) return self def __eq__(self, val):", "colno[i] = var.index row[i] = num def __add__(self, other): 
if", "ret == 1, \"Can't add constraint into model\" def objective(self,", "constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff,", "ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"):", "lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype", "other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve", "lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0 or", "= numbers self.vars = vars self.optype = None self.rhs =", "def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1,", "= lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert", "LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else: c", "def __init__(self, numbers, vars): self.numbers = numbers self.vars = vars", "not None, \"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff,", "(\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform ==", "val) return self def __eq__(self, val): self.optype, self.rhs = (3,", "= c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype", "row): \"Fill colno and row buffers for calling LpSolve.\" for", "self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear", "lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The", "c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p", "lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret", "b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret ==", "ctypes import * import sys import os.path as path import", "\"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret", "isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else:", "if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\"))", "other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self,", "lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte", "in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def", "[c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p,", "== 1: self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\"", "\"Can't construct a new LpSolve model\" self.colbuff = (c_int *", "def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff,", "\"Change the type of the variable\" if \"bin\" in (self.type,", "ValueError(\"Can't load LpSolve library on this platform.\") # Make the", "ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" %", "LpSolve wrapper. 
Copyright (C) 2018, <NAME> License MIT \"\"\" from", "len(lp.vars) + 1 self.value = None self.lp = lp lp.vars.append(self)", "= cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve", "c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object):", "return self def __eq__(self, val): self.optype, self.rhs = (3, val)", "None self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def", "def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if", "for calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)):", "objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars),", "appear several times\" return c def __le__(self, val): self.optype, self.rhs", "[c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype =", "License MIT \"\"\" from ctypes import * import sys import", "constraint(self, const): \"Add a new constraint into the model.\" assert", "or ret == 1: self.update_variables() return ret class LpVariable(object): \"A", "of the variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index,", "the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars),", "c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype =", "lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret", "1, \"Can't add constraint into model\" def objective(self, const): \"Set", "lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes", "LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars,", "else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0", "\"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other):", "= (c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp,", "[c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming", "None def fill_buffers(self, colno, row): \"Fill colno and row buffers", "return self def __ge__(self, val): self.optype, self.rhs = (2, val)", "== \"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype", "= c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self,", "several times\" return c def __le__(self, val): self.optype, self.rhs =", "c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype", "platform.\") # Make the bindings c_double_p = POINTER(c_double) c_int_p =", "self.vars)): colno[i] = var.index row[i] = num def __add__(self, other):", "lib.solve(self.lp) if ret == 0 or ret == 1: self.update_variables()", "\"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype ==", "[c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes =", "\"Can't add constraint into model\" def objective(self, const): \"Set the", "LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert 
len(lp.vars)", "construct a new LpSolve model\" self.colbuff = (c_int * maxvars)()", "const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p))", "lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1,", "LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug", "colno, row): \"Fill colno and row buffers for calling LpSolve.\"", "= [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes =", "1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self,", "\"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve library on", "model\" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double *", "const.rhs) assert ret == 1, \"Can't add constraint into model\"", "the variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype", "add constraint into model\" def objective(self, const): \"Set the objective", "self.index = len(lp.vars) + 1 self.value = None self.lp =", "def constraint(self, const): \"Add a new constraint into the model.\"", "a new constraint into the model.\" assert const.optype is not", "row buffers for calling LpSolve.\" for i, (num, var) in", "else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert", "cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1,", "new constraint into the model.\" assert const.optype is not None,", "= c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class", "== 1, \"Can't add constraint into model\" def objective(self, const):", "raise ValueError(\"Can't load LpSolve library on this platform.\") # Make", "\"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert", "== len(set(c.vars)), \"Some variables appear several times\" return c def", "2**32] here = path.dirname(__file__) if sys.platform == \"win32\": lib =", "> 2**32] here = path.dirname(__file__) if sys.platform == \"win32\": lib", "lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int,", "= [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype", "\"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug =", "[c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes", "ret == 1, \"Can't get variable values\" for i, var", "c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes =", "len(c.vars) == len(set(c.vars)), \"Some variables appear several times\" return c", "len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't", "into the model.\" assert const.optype is not None, \"You must", "[c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes =", "c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set objective", "i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i]", "cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set objective function", "the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] 
here =", "lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte", "if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other])", "into model\" def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp,", "other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars):", "\"Add a new constraint into the model.\" assert const.optype is", "= windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib", "i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve", "maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars =", "LpConstraint(self.numbers + [1], self.vars + [other]) else: c = LpConstraint(self.numbers", "self def __ge__(self, val): self.optype, self.rhs = (2, val) return", "the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret", "self.rhs = None def fill_buffers(self, colno, row): \"Fill colno and", "numbers, vars): self.numbers = numbers self.vars = vars self.optype =", "POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes =", "other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear several times\"", "model\" def update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp,", "values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1,", "LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers = numbers self.vars", "c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int,", "1, \"Can't get variable values\" for i, var in enumerate(self.vars):", "lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret =", "platform # Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize >", "variable values\" for i, var in enumerate(self.vars): var.value = self.rowbuff[i]", "on this platform.\") # Make the bindings c_double_p = POINTER(c_double)", "= [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int]", "cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't add", "as path import platform # Import the DLL ver =", "c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype", "self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double * maxvars)()", "1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new", "= lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret ==", "c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars)", "* import sys import os.path as path import platform #", "= debug self.maxvars = maxvars self.vars = [] self.lp =", "const.optype, const.rhs) assert ret == 1, \"Can't add constraint into", "\"Can't add a variable: \" self.index = len(lp.vars) + 1", "debug=False): self.debug = debug self.maxvars = maxvars self.vars = []", "model.\" assert const.optype is not None, \"You must provide the", "== \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform", "(self.type, vtype): 
lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in", "self.rhs = (1, val) return self def __eq__(self, val): self.optype,", "ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't", "self.numbers = numbers self.vars = vars self.optype = None self.rhs", "1: self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\" def", "assert len(c.vars) == len(set(c.vars)), \"Some variables appear several times\" return", "= None self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype)", "= c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double]", "lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p]", "(self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num):", "<NAME> License MIT \"\"\" from ctypes import * import sys", "c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte]", "= lib.solve(self.lp) if ret == 0 or ret == 1:", "def __eq__(self, val): self.optype, self.rhs = (3, val) return self", "\"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif", "len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret ==", "self.rhs = (3, val) return self def __ge__(self, val): self.optype,", "LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object):", "= len(lp.vars) + 1 self.value = None self.lp = lp", "function of model\" def update_variables(self): \"Update the variable values.\" ret", "c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype =", "== 1, \"Can't get variable values\" for i, var in", "+ [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars +", "variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret ==", "c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype", "numbers self.vars = vars self.optype = None self.rhs = None", "= [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p,", "1 self.value = None self.lp = lp lp.vars.append(self) self.type =", "def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars", "colno and row buffers for calling LpSolve.\" for i, (num,", "[other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars)", "for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self):", "os.path as path import platform # Import the DLL ver", "variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype ==", "= lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype):", "return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A", "= var.index row[i] = num def __add__(self, other): if isinstance(other,", "objective function of model\" def update_variables(self): \"Update the variable values.\"", "self.optype = None self.rhs = None def fill_buffers(self, colno, row):", "c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes =", "__eq__(self, val): self.optype, self.rhs = (3, val) return self def", "= [c_void_p, 
c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte", "\"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform ==", "c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p,", "= c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes", "= LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) ==", "val): self.optype, self.rhs = (1, val) return self def __eq__(self,", "self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars", "% ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\"", "num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers +", "assert len(lp.vars) < lp.maxvars, \"Can't add a variable: \" self.index", "if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other])", "self.optype, self.rhs = (3, val) return self def __ge__(self, val):", "[self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers,", "def retype(self, vtype): \"Change the type of the variable\" if", "the model.\" assert const.optype is not None, \"You must provide", "self.index, (vtype == \"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp,", "vars): self.numbers = numbers self.vars = vars self.optype = None", "a variable: \" self.index = len(lp.vars) + 1 self.value =", "if ret == 0 or ret == 1: self.update_variables() return", "c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype", "if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp)", "of model\" def update_variables(self): \"Update the variable values.\" ret =", "\" self.index = len(lp.vars) + 1 self.value = None self.lp", "LpSolve library on this platform.\") # Make the bindings c_double_p", "__init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a", "import * import sys import os.path as path import platform", "must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret =", "maxvars) assert self.lp != 0, \"Can't construct a new LpSolve", "assert const.optype is not None, \"You must provide the RHS", "3) ret = lib.solve(self.lp) if ret == 0 or ret", "LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self)", "lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num],", "lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int,", "const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p),", "== \"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self,", "MIT \"\"\" from ctypes import * import sys import os.path", "class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers", "DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__)", "calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i]", "new LpSolve model\" self.colbuff = 
(c_int * maxvars)() self.rowbuff =", "self.lp != 0, \"Can't construct a new LpSolve model\" self.colbuff", "# Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int)", "c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes =", "<filename>home/scripts/memory/lpsolve.py \"\"\" LpSolve wrapper. Copyright (C) 2018, <NAME> License MIT", "values\" for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def", "+ 1 self.value = None self.lp = lp lp.vars.append(self) self.type", "return c def __le__(self, val): self.optype, self.rhs = (1, val)", "var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the", "self.debug = debug self.maxvars = maxvars self.vars = [] self.lp", "= (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def", "the type of the variable\" if \"bin\" in (self.type, vtype):", "import platform # Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize", "load LpSolve library on this platform.\") # Make the bindings", "\"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def", "\"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform == \"win32\":", "\"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers = numbers", "self.index, (vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num], [self])", "get variable values\" for i, var in enumerate(self.vars): var.value =", "!= 0, \"Can't construct a new LpSolve model\" self.colbuff =", "var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num", "Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here", "self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change the type", "vtype): \"Change the type of the variable\" if \"bin\" in", "self.value = None self.lp = lp lp.vars.append(self) self.type = \"real\"", "windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib =", "\"\"\" from ctypes import * import sys import os.path as", "from ctypes import * import sys import os.path as path", "lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a variable:", "var.index row[i] = num def __add__(self, other): if isinstance(other, LpVariable):", "+ [1], self.vars + [other]) else: c = LpConstraint(self.numbers +", "def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1],", "assert ret == 1, \"Can't set objective function of model\"", "__add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1],", "c_int_p)) assert ret == 1, \"Can't set objective function of", "c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p,", "vars self.optype = None self.rhs = None def fill_buffers(self, colno,", "variables appear several times\" return c def __le__(self, val): self.optype,", "lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a", "debug self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0,", "1, \"Can't set objective function of model\" def update_variables(self): \"Update", "= None self.rhs = None def fill_buffers(self, colno, row): \"Fill", "bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int,", "= [c_void_p, c_double_p] 
lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear", "c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p]", "here = path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here,", "class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert", "= c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype", "(num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] =", "= [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes", "c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p,", "lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes =", "variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't", "2018, <NAME> License MIT \"\"\" from ctypes import * import", "= c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype =", "model\" def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0)", "sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else:", "lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int", "lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in (self.type, vtype):", "self.retype(vtype) def retype(self, vtype): \"Change the type of the variable\"", "\"\"\" LpSolve wrapper. Copyright (C) 2018, <NAME> License MIT \"\"\"", "buffers for calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers,", "return LpConstraint(self.numbers + [1], self.vars + [other]) else: c =", "ret = lib.solve(self.lp) if ret == 0 or ret ==", "class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False):", "ret == 0 or ret == 1: self.update_variables() return ret", "lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load", "= POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype", "assert self.lp != 0, \"Can't construct a new LpSolve model\"", "sys import os.path as path import platform # Import the", "sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif", "None, \"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff)", "num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint):", "elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\"))", "c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int]", "import os.path as path import platform # Import the DLL", "ret == 1: self.update_variables() return ret class LpVariable(object): \"A LpSolve", "c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p,", "= [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p,", "enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp)", "= [c_void_p, c_ubyte] 
lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes", "0, \"Can't construct a new LpSolve model\" self.colbuff = (c_int", "c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars,", "type of the variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp,", "cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set", "LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)),", "= [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes", "lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p,", "[c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes", "c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't add constraint", "== \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise", "def __le__(self, val): self.optype, self.rhs = (1, val) return self", "= None def fill_buffers(self, colno, row): \"Fill colno and row", "this platform.\") # Make the bindings c_double_p = POINTER(c_double) c_int_p", "a new LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff", "\"Some variables appear several times\" return c def __le__(self, val):", "[] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't", "Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars", "cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve library", "lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p]", "vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a variable: \"", "c def __le__(self, val): self.optype, self.rhs = (1, val) return", "c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte", "\"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret =", "other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables", "the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes =", "provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp,", "None self.rhs = None def fill_buffers(self, colno, row): \"Fill colno", "[self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return", "var.value = self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if", "LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def", "Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug", "model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret", "= [] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0,", "ver))) else: raise ValueError(\"Can't load LpSolve library on this platform.\")", "* maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const):", "assert ret == 1, \"Can't get variable values\" for i,", "row[i] = num def __add__(self, other): if isinstance(other, 
LpVariable): return", "self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct", "else: raise ValueError(\"Can't load LpSolve library on this platform.\") #", "= c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte]", "lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes =", "const): \"Add a new constraint into the model.\" assert const.optype", "fill_buffers(self, colno, row): \"Fill colno and row buffers for calling", "\"Fill colno and row buffers for calling LpSolve.\" for i,", "(1, val) return self def __eq__(self, val): self.optype, self.rhs =", "library on this platform.\") # Make the bindings c_double_p =", "(vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def", "lp.maxvars, \"Can't add a variable: \" self.index = len(lp.vars) +", "len(set(c.vars)), \"Some variables appear several times\" return c def __le__(self,", "self.vars = vars self.optype = None self.rhs = None def", "= path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\"", "if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver)))" ]
[ "id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id,", "id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB()", "@mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id,", "= database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with(", "mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active =", "mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST',", "LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock()", "self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, mock_loadbalancer_get, mock_generate_uuid,", "test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "# Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix #", "constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') #", "result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def 
test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid,", "mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def", "lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result", "new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid,", "mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(", "mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID,", "uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock()", "provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict)", "# Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\",", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock)", "= self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, }", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type':", "mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7policy_repo_update.reset_mock()", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): 
mark_member_pending_create =", "mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def", "HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE", "mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID)", "= uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id =", "self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id", "self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock()", "def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with", "@mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "# Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with(", "= VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority =", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID,", "mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the", "the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID)", "@mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID,", "mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", 
"repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) # Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID})", "revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST',", "in compliance with the License. You may obtain # a", "= database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\",", "@mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with(", "exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self,", "L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock()", "def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with(", "# Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = (", "mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) 
@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE',", "vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock()", "test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db =", "revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "= mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock", "(database_tasks. MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get',", "the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. 
MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock)", "LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock()", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST',", "def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB()", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID)", "from taskflow.types import failure from octavia.common import constants from octavia.common", "= create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) #", "import failure from octavia.common import constants from octavia.common import data_models", "self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG,", "repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect =", "new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update,", 
"update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid,", "data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool =", "with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID,", "self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None,", "= VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock =", "new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY,", "uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid()", "ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID)", "repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) #", "the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) #", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER',", "as repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID", "'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid,", "def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db", "= (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test", "@mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) #", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase()", "mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self,", "LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST',", "= mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock =", "mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the", "= mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id =", "mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer)", "database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get',", "id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect =", "members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'},", "role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight':", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with(", "repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock()", "# Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "= database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}})", "def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock() 
mock_amphora_repo_update.side_effect", "# Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR)", "= database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result)", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with(", "with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id,", "mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0,", "mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock)", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active =", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. 
MarkL7PolicyPendingUpdateInDB())", "# operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health =", "constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID)", "mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST',", "revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) #", "{'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock =", "constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH,", "'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock)", "mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) #", "def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock)", "mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def 
test_update_load_balancer_in_db(self,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock)", "vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock()", "mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert", "revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST',", "L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "= database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock =", "'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the", "and limitations # under the License. # import random from", "def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock)", "HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID =", "'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update,", "the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "revert 
mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR)", "repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) #", "Copyright 2015 Hewlett-Packard Development Company, L.P. # # Licensed under", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. MarkL7RulePendingUpdateInDB())", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. MarkAmphoraAllocatedInDB())", "compute_id=COMPUTE_ID) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect =", "mark_l7policy_pending_create = (database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) #", "= {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None,", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get,", "name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect", "id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID,", "to in writing, software # distributed under the License is", "test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id", "mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_amphora_repo_delete): 
disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update')", "revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with(", "update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock()", "_amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate')", "the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( #", "# 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete,", "repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock()", "mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock()", "or agreed to in writing, software # distributed under the", "_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get,", "ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid,", "# Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail')", "the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID,", "id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "from octavia.common import data_models from octavia.common import utils from octavia.controller.worker.v2.tasks", "def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer", "mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) 
mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP,", "update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update,", "result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "(database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) #", "def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "Apache License, Version 2.0 (the \"License\"); you may # not", "= POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock =", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring())", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. 
MarkL7RulePendingCreateInDB())", "mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db =", "L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "# Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "@mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring()", "the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID)", "MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock)", "mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, 
provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def", "return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id =", "amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id)", "provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock()", "Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST',", "@mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0])", "= (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test", "Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update,", "mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "License, Version 2.0 (the \"License\"); you may # not use", "(database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert", "mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST',", "Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, 
provisioning_status=constants.ERROR) #", "= uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP =", "amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "@mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with", "AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with exception mock_amphora_repo_update.reset_mock()", "(database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "not use this file except in compliance with the License.", "delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID,", "mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj", "status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID)", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1,", "mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp')", "database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock)", "mock from oslo_db import exception as odb_exceptions from oslo_utils import", "unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb)", "_amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "= COMPUTE_ID 
_amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip =", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData()", "'192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100)", "mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action':", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock)", "= database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) #", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update =", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1", "self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id)", "id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "= get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid,", "L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP", "operating_status=constants.ONLINE) # Test the revert 
mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock)", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb", "delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the", "id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert", "= uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID =", "exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) #", "MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID", "'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock()", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock)", "from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) #", "MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) 
repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test", "provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)])", "[mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id", "'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid,", "the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect", "mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. 
MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE)", "HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID", "id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect", "'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1", "mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb", "the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "License is distributed on an \"AS IS\" BASIS, WITHOUT #", "{ constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB())", "'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock()", "\"License\"); you may # not use this file except in", "revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST',", "mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test", "'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self,", "id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect", "delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the", "mock_member_repo_update.side_effect = Exception('fail') 
mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with", "mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'})", "mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with(", "provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict)", "@mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session,", "@mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener()", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1", "mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "'/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the", "License. 
# import random from cryptography import fernet import mock", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "= mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip =", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with", "# operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session,", "test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member", "LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT,", "redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy',", "status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None,", "mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "_amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id", "uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid()", "database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID)", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') 
mark_amp_deleted_in_db.revert(_amphora_mock)", "provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None,", "test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete", "mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid,", "= map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz')", "def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID,", "mark_pool_pending_update = (database_tasks. 
MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) #", "the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test", "mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update,", "map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None)", "Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self,", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock()", "repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB()", "update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update')", "mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.'", "mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", 
"mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST',", "revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with(", "provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST',", "'192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE'", "mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete')", "repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID", "(database_tasks. 
MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb =", "L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test Not", "'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid,", "None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR,", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect =", "update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2')", "def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete):", "mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock()", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update,", "mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create',", "uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11'", "revert with 
exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule", "Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id)", "'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect =", "map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock)", "amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb =", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners", "provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "failure from octavia.common import constants from octavia.common import data_models from", "the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test", "Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID)", "mark_l7policy_pending_delete = (database_tasks. 
MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) #", "'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the revert", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with(", "get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip)", "the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP')", "mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with", "id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG,", "Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock)", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2')", "mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert =", "exc from taskflow.types import failure from octavia.common import constants from", "LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock()", "mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2,", "self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id", "LB_ID, provisioning_status=constants.ACTIVE) # Test with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() 
mock_list_not_error.reset_mock()", "the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test", "mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE)", "_loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST',", "database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1,", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with(", "mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name':", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP)", "_amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip", "mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the", "Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID)", "import constants from octavia.common import data_models from octavia.common import utils", "self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, 
mock_listener_repo_update,", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock()", "self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy", "MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with(", "mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool =", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock)", "(database_tasks. MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE:", "self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self,", "test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp", "= [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock],", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock)", "= (database_tasks. 
MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test", "mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid,", "Version 2.0 (the \"License\"); you may # not use this", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, mock_loadbalancer_get,", "Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "= database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert", "provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB()", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "mark_member_pending_create = (database_tasks. 
MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) #", "Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self,", "redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 =", "mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with(", "mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) #", "uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid()", "= mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id =", "PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock()", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 
'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID", "@mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG,", "the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( #", "mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update')", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test", "self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG,", "listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1,", "TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id", "lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj =", "mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST',", "Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) #", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() 
mock_loadbalancer_repo_update.side_effect", "= mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip =", "the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with(", "listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session,", "L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "# Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR)", "mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE)", "_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID,", "key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert')", "provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect =", "compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role)", "uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid()", "= LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock =", "lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, 
mock_loadbalancer_get, mock_generate_uuid, mock_LOG,", "= Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool')", "id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with(", "= Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG,", "the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "= [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True)", "= data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the", "members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2)", "compliance with the License. You may obtain # a copy", "database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. 
MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock)", "id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception", "= {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "= self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid,", "[mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def", "mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def", "delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with(", "# Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with", "map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id,", "'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid,", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception", "LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure)", "database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with(", "def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details", "mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid()", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() 
loadbalancer = update_vip.execute(LB_ID,", "revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete,", "'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock()", "mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb =", "mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID)", "mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE)", "= L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy =", "the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls(", "mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST',", "# # Unless required by applicable law or agreed to", "# TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, #", "POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID", "Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) #", "mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. 
MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE)", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock()", "mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE", "= database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID,", "mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect", "_compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST',", "Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock)", "(database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE)", "= (database_tasks. 
MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test", "Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST',", "mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock()", "provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with(", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST',", "mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID)", "[mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock()", "(database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. 
MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "# repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def", "the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test", "Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called()", "the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID:", "test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "import utils from octavia.controller.worker.v2.tasks import database_tasks from octavia.db import repositories", "uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid()", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. 
MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock)", "mock.MagicMock() _l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID", "def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "@mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "_vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID, constants.ONLINE) mock_member_repo_update_pool_members.assert_called_once_with(", "= random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID =", "{constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy", "'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG,", "= uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID =", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. 
MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock)", "self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST',", "LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock)", "mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE,", "revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the", "Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) #", "= uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID =", "'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active =", "_tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id", "'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST',", "busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. 
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock)", "mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) #", "'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp", "= LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2',", "'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid()", "Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock)", "SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock()", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the", "{'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock()", "= LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor =", "id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, 
mock_amphora_repo_get,", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT})", "# 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete,", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''),", "@mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock = mock.MagicMock()", "self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP',", "(database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "= database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert", "reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock,", "= data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2',", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks.", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock)", "'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def", "def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "import fernet import mock from oslo_db import exception as odb_exceptions", "self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST',", "may obtain # a copy of the License at #", "mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with(", "@mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "@mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def 
test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update,", "redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True)", "Unless required by applicable law or agreed to in writing,", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def", "mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST',", "LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb)", "= database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) #", "# Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called()", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock)", "mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict,", "the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. MarkAmphoraPendingUpdateInDB())", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock)", "_listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "Test Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with(", "L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID:", "mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with", "Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update,", "def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted", "def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID,", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock()", "mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "# Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail')", "mock_LOG, mock_get_session, 
mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock)", "the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test", "mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self,", "'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP,", "'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid,", "test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon", "revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test", "mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR)", "'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID,", "_loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock,", "_pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST',", "exception 
mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "_vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock", "= database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self,", "= Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self,", "revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST',", "assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST',", "mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with(", "'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1,", "disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG,", "Development Company, L.P. # # Licensed under the Apache License,", "= (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test", "= [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae =", "mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with", "def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update,", "# Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING,", "mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test',", "update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock()", "repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result", "(database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE)", "vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid,", "test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict =", "the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with(", "mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert", "provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)])", "[]) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict])", "update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock()", "self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "from octavia.common import constants from octavia.common import data_models from octavia.common", "= Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self,", "either express or implied. 
See the # License for the", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "may # not use this file except in compliance with", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid,", "update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST',", "constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) # Test the revert repo.ListenerRepository.delete.reset_mock()", "= Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self,", "revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 =", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with(", "status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert with exception 
import random

from cryptography import fernet
import mock
from oslo_db import exception as odb_exceptions
from oslo_utils import uuidutils

from octavia.common import constants
from octavia.common import data_models
from octavia.common import utils
from octavia.controller.worker.tasks import database_tasks
from octavia.db import repositories as repo
import octavia.tests.unit.base as base

AMP_ID = uuidutils.generate_uuid()
COMPUTE_ID = uuidutils.generate_uuid()
LB_ID = uuidutils.generate_uuid()
SERVER_GROUP_ID = uuidutils.generate_uuid()
LISTENER_ID = uuidutils.generate_uuid()
POOL_ID = uuidutils.generate_uuid()
HM_ID = uuidutils.generate_uuid()
MEMBER_ID = uuidutils.generate_uuid()
PORT_ID = uuidutils.generate_uuid()
SUBNET_ID = uuidutils.generate_uuid()
VRRP_PORT_ID = uuidutils.generate_uuid()
HA_PORT_ID = uuidutils.generate_uuid()
L7POLICY_ID = uuidutils.generate_uuid()
L7RULE_ID = uuidutils.generate_uuid()
VRRP_ID = random.randrange(255)
VRRP_PRIORITY = random.randrange(100)
CACHED_ZONE = 'zone1'
IMAGE_ID = uuidutils.generate_uuid()
COMPUTE_FLAVOR = uuidutils.generate_uuid()

_amphora_mock = mock.MagicMock()
_amphora_mock.id = AMP_ID
_amphora_mock.compute_id = COMPUTE_ID
_amphora_mock.ha_port_id = HA_PORT_ID
_amphora_mock.vrrp_port_id = VRRP_PORT_ID
_amphora_mock.vrrp_id = VRRP_ID
_amphora_mock.vrrp_priority = VRRP_PRIORITY
_amphorae = [_amphora_mock]

_loadbalancer_mock = mock.MagicMock()
_loadbalancer_mock.id = LB_ID
_loadbalancer_mock.amphorae = [_amphora_mock]

key = utils.get_six_compatible_server_certs_key_passphrase()
fer = fernet.Fernet(key)
_pem_mock = fer.encrypt(
    utils.get_six_compatible_value('test_cert')
)

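# All of the database-task tests share the same repository-layer mocks: the
# class below is wrapped in mock.patch decorators so every test method
# receives the patched repositories as arguments in a fixed order (the
# innermost decorator becomes the first mock argument after self), and the
# session helper is patched to return the sentinel 'TEST' that the
# assert_called_once_with() checks expect as the first positional argument.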
"mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id,", "'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST',", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def", "revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the", "@mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self,", "'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "_amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks.", "with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock()", "L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail')", "mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None,", "listener2.l7policies = l7policies listeners = [listener1, listener2] pools = [default_pool,", "Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) 
repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id", "LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST',", "id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "lb = mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock,", "AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None,", "delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock)", "LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID,", "database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self,", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session,", "= database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) #", "COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', 
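    # Each test below follows the same three-step pattern: call the task's
    # execute() and assert the expected repository update, call revert() and
    # assert the object is flipped to ERROR, then make the repository raise
    # and call revert() again to confirm a failing revert still attempts the
    # ERROR update without propagating the exception.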

    def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid,
                                               mock_LOG, mock_get_session,
                                               mock_loadbalancer_repo_update,
                                               mock_listener_repo_update,
                                               mock_amphora_repo_update,
                                               mock_amphora_repo_delete):

        amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse()
        amp_cert_busy_to_F.execute(_amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, cert_busy=False)

    def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG,
                                  mock_get_session,
                                  mock_loadbalancer_repo_update,
                                  mock_listener_repo_update,
                                  mock_amphora_repo_update,
                                  mock_amphora_repo_delete):

        mark_loadbalancer_active = database_tasks.MarkLBActiveInDB()
        mark_loadbalancer_active.execute(self.loadbalancer_mock)

        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', LB_ID, provisioning_status=constants.ACTIVE)
        self.assertEqual(0, repo.ListenerRepository.update.call_count)

        # Test the revert
        mock_loadbalancer_repo_update.reset_mock()
        mark_loadbalancer_active.revert(self.loadbalancer_mock)

        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', id=LB_ID, provisioning_status=constants.ERROR)
        self.assertEqual(0, repo.ListenerRepository.update.call_count)

        # Test the revert with exception
        mock_loadbalancer_repo_update.reset_mock()
        mock_loadbalancer_repo_update.side_effect = Exception('fail')
        mark_loadbalancer_active.revert(self.loadbalancer_mock)

        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', id=LB_ID, provisioning_status=constants.ERROR)
        self.assertEqual(0, repo.ListenerRepository.update.call_count)

    @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
    def test_mark_health_mon_pending_create_in_db(
            self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_health_mon_pending_create = (
            database_tasks.MarkHealthMonitorPendingCreateInDB())
        mark_health_mon_pending_create.execute(self.health_mon_mock)

        mock_health_mon_repo_update.assert_called_once_with(
            'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_health_mon_repo_update.reset_mock()
        mark_health_mon_pending_create.revert(self.health_mon_mock)

        mock_health_mon_repo_update.assert_called_once_with(
            'TEST', id=HM_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_health_mon_repo_update.reset_mock()
        mock_health_mon_repo_update.side_effect = Exception('fail')
        mark_health_mon_pending_create.revert(self.health_mon_mock)

        mock_health_mon_repo_update.assert_called_once_with(
            'TEST', id=HM_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.L7PolicyRepository.update')
    def test_mark_l7policy_pending_create_in_db(self,
                                                mock_l7policy_repo_update,
                                                mock_generate_uuid, mock_LOG,
                                                mock_get_session,
                                                mock_loadbalancer_repo_update,
                                                mock_listener_repo_update,
                                                mock_amphora_repo_update,
                                                mock_amphora_repo_delete):

        mark_l7policy_pending_create = (database_tasks.
                                        MarkL7PolicyPendingCreateInDB())
        mark_l7policy_pending_create.execute(self.l7policy_mock)

        mock_l7policy_repo_update.assert_called_once_with(
            'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_l7policy_repo_update.reset_mock()
        mark_l7policy_pending_create.revert(self.l7policy_mock)

        mock_l7policy_repo_update.assert_called_once_with(
            'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7policy_repo_update.reset_mock()
        mock_l7policy_repo_update.side_effect = Exception('fail')
        mark_l7policy_pending_create.revert(self.l7policy_mock)

        mock_l7policy_repo_update.assert_called_once_with(
            'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.MemberRepository.update')
    def test_mark_member_pending_create_in_db(self, mock_member_repo_update,
                                              mock_generate_uuid, mock_LOG,
                                              mock_get_session,
                                              mock_loadbalancer_repo_update,
                                              mock_listener_repo_update,
                                              mock_amphora_repo_update,
                                              mock_amphora_repo_delete):

        mark_member_pending_create = (database_tasks.
                                      MarkMemberPendingCreateInDB())
        mark_member_pending_create.execute(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_member_repo_update.reset_mock()
        mark_member_pending_create.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_member_repo_update.reset_mock()
        mock_member_repo_update.side_effect = Exception('fail')
        mark_member_pending_create.revert(self.member_mock)

        mock_member_repo_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.L7RuleRepository.update')
    def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update,
                                              mock_generate_uuid, mock_LOG,
                                              mock_get_session,
                                              mock_loadbalancer_repo_update,
                                              mock_listener_repo_update,
                                              mock_amphora_repo_update,
                                              mock_amphora_repo_delete):

        mark_l7rule_pending_create = (database_tasks.
                                      MarkL7RulePendingCreateInDB())
        mark_l7rule_pending_create.execute(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_l7rule_repo_update.reset_mock()
        mark_l7rule_pending_create.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7rule_repo_update.reset_mock()
        mock_l7rule_repo_update.side_effect = Exception('fail')
        mark_l7rule_pending_create.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)

"update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the revert", "'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with(", "HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID", "provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with(", "'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID", "octavia.common import constants from octavia.common import data_models from octavia.common import", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. MarkHealthMonitorPendingUpdateInDB())", "LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict)", "mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict =", "AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG,", "get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session,", "L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID,", "revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 
'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG,", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock()", "{constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID,", "the specific language governing permissions and limitations # under the", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) #", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST',", "Test with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID:", "HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the", "data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB())", "Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self,", "mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer =", "under the Apache License, Version 2.0 (the \"License\"); you may", "delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # TODO(johnsom) fix once provisioning", "amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae", "(database_tasks. 
MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the", "# Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock)", "id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) #", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with(", "Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None)", "= database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def", "mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) #", "self.assertEqual(1, 
repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock()", "mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update')", "exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "= database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the", "test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete", "AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock()", "Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID)", "mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj =", "Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) #", "{'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "# Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)", "mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db", 
"repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update,", "required by applicable law or agreed to in writing, software", "constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock", "status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type':", "= Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks.", "database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR)", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None)", "@mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name':", "listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry]", "'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') # Test the revert", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED)", "= data_models.Pool(id='default_pool', members=members1, 
health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 =", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. MarkHealthMonitorPendingDeleteInDB())", "provisioning_status=constants.ERROR) # Test Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()]", "# provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID}", "'cookie_name': None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict}", "agreed to in writing, software # distributed under the License", "'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom)", "revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id,", "with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID,", "# Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "distributed under the License is distributed on an \"AS IS\"", "provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self,", "= (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. 
MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with(", "= Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self,", "L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self,", "the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test", "CONDITIONS OF ANY KIND, either express or implied. See the", "revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the", "import database_tasks from octavia.db import repositories as repo import octavia.tests.unit.base", "with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail')", "provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "= database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID,", "uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid()", "status added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR)", "reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock,", "def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy(", "Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) 
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self,", "mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute(", "Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) #", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "_amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock", "def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update =", "'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners", "= database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "mark_l7rule_pending_delete = (database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) #", "the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. 
MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock)", "# Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with(", "mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb", "LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id = L7POLICY_ID", "_amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. 
MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID,", "provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no", "def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2", "mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value =", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with(", "mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active", "the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test", "Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock)", "update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock()", "Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_active.revert(self.member_mock)", "repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self,", "self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority)", "mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete =", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mark_listener_pending_delete = (database_tasks. 
MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) #", "id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP", "repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)])", "def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp):", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with LB_ID", "Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID)", "_vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create')", "= database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid,", "mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR)", "= failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock)", "MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert 
mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "= database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) #", "vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session,", "amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self,", "@mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID,", "mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock()", "( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update,", "mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp =", "Hewlett-Packard Development Company, L.P. 
# # Licensed under the Apache", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id':", "'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2,", "(database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks.", "_amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID)", "LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')]", "mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') 
@mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self,", "listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active =", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert", "status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect =", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect =", "mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with", "See the # License for the specific language governing permissions", "mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG,", "= COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock)", "default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id,", "def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db", "mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active =", "2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the revert", "mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name':", "= [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules", 
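Several fragments above also exercise the exception branch of revert() by assigning side_effect = Exception('fail') to the mocked repository method and asserting that the ERROR update was still attempted. A hedged sketch of that branch, assuming the same simplified stand-ins as before (MarkLBActiveInDB and the test name below are illustrative, not Octavia's actual code):

from unittest import mock


class MarkLBActiveInDB:
    # Illustrative stand-in: revert() must tolerate a failing repository
    # update instead of raising out of the revert flow.
    def __init__(self, repo):
        self.repo = repo

    def revert(self, lb_id, *args, **kwargs):
        try:
            self.repo.update('TEST', id=lb_id, provisioning_status='ERROR')
        except Exception:
            pass  # revert paths log and continue; they never re-raise


def test_revert_with_exception():
    repo = mock.Mock()
    repo.update.side_effect = Exception('fail')  # drive the exception path

    MarkLBActiveInDB(repo).revert('lb-1')        # must not raise

    # Even though the update failed, it must have been attempted once.
    repo.update.assert_called_once_with(
        'TEST', id='lb-1', provisioning_status='ERROR')


test_revert_with_exception()

This mirrors the reset_mock()/side_effect/assert_called_once_with sequence that recurs throughout the fragments: the revert path is verified twice, once normally and once with the repository raising.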
"database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock()", "law or agreed to in writing, software # distributed under", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST',", "self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def", "exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "AMP_ID, loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect =", "None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert", "revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) #", "role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "(database_tasks. 
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID,", "'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "listener2] pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners,", "id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "LB_ID self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock()", "from octavia.db import repositories as repo import octavia.tests.unit.base as base", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB()", "database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock)", "test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete", "L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id =", "from sqlalchemy.orm import exc from taskflow.types import failure from octavia.common", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock()", "repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, 
provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls(", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock)", "= database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False)", "= (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "@mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def", "AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "_listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict =", "database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt(", "= uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb =", "map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with", "MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) #", "= Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2})", "'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={", "'TEST', id=LB_ID, 
provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock()", "'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect =", "= (database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb =", "mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test", "mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock()", "exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools", "revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception", "@mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB()", "= Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 
'prov_status_active_if_not_error') def", "# Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [])", "mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY)", "result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid,", "self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock", "(database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock,", "mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id)", "Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) #", "the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test", "mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID", "amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mark_member_pending_update = (database_tasks. 
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) #", "mock_loadbalancer_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip =", "return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session,", "{'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH,", "mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy =", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID,", "'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create", "COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id", "mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID, constants.ONLINE) mock_member_repo_update_pool_members.assert_called_once_with( 'TEST', POOL_ID, operating_status=constants.ONLINE)", "= mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id =", "SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock()", "'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None,", "pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools)", "mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR})", "VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = 
uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID", "= database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the", "revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks.", "# # Licensed under the Apache License, Version 2.0 (the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock)", "amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock)", "mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE)", "'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff)", "id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) #", "SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID", "= Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid,", "= IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session',", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock)", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with(", "# Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') 
assoc_fo_amp_lb_id.revert(AMP_ID)", "@mock.patch('octavia.db.repositories.ListenerRepository.' 'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY", "{'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) #", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB())", "Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock()", "test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create", "mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG,", "LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID", "provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect =", "mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. 
MarkMemberPendingDeleteInDB())", "provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)])", "= L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def", "def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F", "data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID)", "mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id,", "= LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY,", "mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id =", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. MarkLBAmphoraeDeletedInDB())", "mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete')", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock,", "mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', #", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks. 
MarkL7RulePendingDeleteInDB())", "mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def", "mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update,", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id =", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp =", "compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID)", "VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get,", "= Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid,", "= (database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. 
MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test", "exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)", "utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id,", "mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID)", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "@mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update,", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete =", "mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock()", "id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) 
mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "= (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the", "uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid()", "= mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae =", "mark_amp_pending_delete_in_db = (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) #", "= '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID =", "the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB()", "mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock)", "id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail')", "database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock)", "the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "amp_id) # Test the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') 
self.assertTrue(mock_amphora_repo_delete.called)", "Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def", "[mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1,", "= database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self,", "mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count)", "mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls(", "status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock()", "[mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST',", "= get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id)", "listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2,", "= CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update')", "{'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') #", 
"mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id =", "update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool =", "id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) #", "Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) #", "mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict =", "[listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration',", "def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id)", "permissions and limitations # under the License. 
# import random", "MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "@mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class", "result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def", "repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with", "= Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid,", "MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST',", "= Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db(", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG,", "= ( database_tasks.MarkLBAmphoraeHealthBusy()) 
mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self,", "description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid,", "{'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID)", "mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete =", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'),", "self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy", "= HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id =", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid,", "def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "(database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the", "the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test", "mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP,", "# Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): 
mark_pool_pending_update = (database_tasks.", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID from", "pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2,", "mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the", "# MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info =", "mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE)", "test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update", "= Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def", "# operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "'TEST', id=MEMBER_ID) # Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom)", "mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid,", "the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test", "repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = 
database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH,", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock()", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock)", "health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count)", "mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with", "revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test", "AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid,", "@mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create =", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. 
MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with(", "def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete", "return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete,", "mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID)", "_amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert", "mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def", "mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "# Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "= l7policies listeners = [listener1, listener2] pools = [default_pool, redirect_pool,", "= HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id =", "once provisioning status added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID,", "= Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def", "create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception", "L.P. 
# # Licensed under the Apache License, Version 2.0", "= database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID,", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.'", "def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) #", "mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with", "mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert", "mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "= Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self,", "limitations # under the License. 
[Shuffled n-gram fragments extracted from the Octavia unit tests for octavia.controller.worker.v2.tasks.database_tasks (class TestDatabaseTasks): the module header and imports, the setUp mocks for amphora, load balancer, listener, pool, member and L7 policy/rule objects, and test methods such as test_create_amphora_in_db, test_mark_LB_active_in_db, test_update_amphora_vip_data and test_mark_health_mon_active_in_db, each asserting the expected repository update on execute() and the ERROR provisioning status on revert().]
'allocate_and_associate', side_effect=[_amphora_mock,", "'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST',", "# Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}]", "the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "uuidutils from sqlalchemy.orm import exc from taskflow.types import failure from", "lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock)", "this file except in compliance with the License. 
You may", "POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock()", "update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with(", "revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the", "self.db_pool_mock.health_monitor = self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID,", "def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members", "HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP", "database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the", "_amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB())", "data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with(", "# Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR)", "mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB()", "the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with(", "= Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB()", "mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update,", "data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool)", "# Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail')", "Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) #", "# Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details =", "provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock() 
mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID)", "@mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2,", "return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "= database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert", "self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None,", "file except in compliance with the License. You may obtain", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "(database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the", "revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp =", "compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete", "Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete')", "OR CONDITIONS OF ANY KIND, either express or implied. 
See", "VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "def test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP)", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def", "revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock,", "self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "= uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID =", "side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock)", "[_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result)", "COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP", "mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool", "repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = 
Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type':", "= L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID _listener_mock =", "'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG,", "AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self,", "repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert", "Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) #", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid,", "revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the", "= mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id =", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. 
MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id,", "LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID,", "mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.'", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID,", "database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id)", "compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID,", "AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP", "Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock)", "= LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id =", "the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test", "[mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with(", "the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID,", "lb_network_ip=LB_NET_IP) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect =", "update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) #", "self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock", "mark_l7policy_pending_delete.execute(self.l7policy_mock) 
mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock()", "the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted =", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id =", "'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with(", "the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called()", "mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE)", "sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test',", "def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST',", "= SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock =", "Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) #", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. 
MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID,", "Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) #", "mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH,", "operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID,", "1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test", "mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool =", "Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID)", "LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with(", "database_tasks from octavia.db import repositories as repo import octavia.tests.unit.base as", "database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock()", "'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks.", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. 
MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID,", "'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details =", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID:", "vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData()", "= (database_tasks. 
MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test", "repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls(", "def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip", "= data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id", "Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update,", "def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID,", "= (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the", "delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the", "def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the", "map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'})", "# Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the", "mark_health_mon_pending_create.revert(self.health_mon_mock) 
mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update,", "update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test',", "{'provisioning_status': constants.ERROR}) # Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect", "'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners =", "_vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip", "writing, software # distributed under the License is distributed on", "test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data =", "the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the", "mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', #", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb)", "'TEST', 
AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "the License. You may obtain # a copy of the", "def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer()", "use this file except in compliance with the License. You", "test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete =", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. 
MarkMemberPendingUpdateInDB())", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID,", "test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB()", "disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid,", "mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock()", "update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0,", "[listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with", "provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock()", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete =", "def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock)", "mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "taskflow.types import failure from octavia.common import constants from octavia.common import", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception", 
"'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get,", "Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG,", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def", "= data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool", "LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test',", "LISTENER_ID, name='test', description='test2') # Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with(", "import mock from oslo_db import exception as odb_exceptions from oslo_utils", "'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "# Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID,", "uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10'", "mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self,", "mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid,", "mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon =", "Test the revert 
mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with(", "listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with(", "mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid,", "= '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY =", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete =", "mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks.", "# Test with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict =", "mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock()", "mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid,", "database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test',", "express or implied. 
See the # License for the specific", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with(", "# 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG,", "'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock()", "update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with", "LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock =", "_listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id", "the Apache License, Version 2.0 (the \"License\"); you may #", "_pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID:", "# Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "= Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get')", "l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools =", "role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP", "= {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test', 'description':", "# Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 
'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete')", ") update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid,", "Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock)", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert", "provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid,", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "[mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE)", "repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "= mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp()", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. 
MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID)", "weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST',", "Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) #", "( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid,", "'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the revert", "MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID", "revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED)", "LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, [])", "test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy", "test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active", "= Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) #", "mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the", "with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "@mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, 
mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID)", "mark_amp_deleted_in_db = (database_tasks. MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get',", "def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test", "_amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae", "mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test", "repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock)", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self,", "MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail')", "= (database_tasks. 
MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test", "vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key =", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active =", "mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP", "L7RULE_ID _listener_mock = mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id", "mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock,", "'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with exception", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "= Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 
'allocate_and_associate', side_effect=[_amphora_mock, None])", "mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy()", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock)", "'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update,", "mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR)", "mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID,", "'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) #", "mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP,", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "[amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result", "test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "= fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = 
(database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID,", "repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self,", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "_amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role", "map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID,", "'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1'", "mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock()", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect", "Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "Company, L.P. 
# # Licensed under the Apache License, Version", "L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST',", "mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "= AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip =", "revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') #", "uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid()", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG,", "VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock()", "# TODO(johnsom) fix once provisioning status added # repo.HealthMonitorRepository.update.assert_called_once_with( #", "mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID)", "exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock)", "mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db =", "= [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool =", "update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST',", "provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)])", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = 
{constants.LISTENER_ID:", "mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the", "AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY)", "mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect", "= {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB()", "LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID,", "repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count)", "the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID)", "= VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id =", "mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, 
mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID", "[_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock]", "mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock()", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB()", "revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert", "provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks.", "odb_exceptions from oslo_utils import uuidutils from sqlalchemy.orm import exc from", "MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update =", "mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj", "data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID:", "Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID =", "def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "compute_id=COMPUTE_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, 
_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with(", "repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self,", "License for the specific language governing permissions and limitations #", "id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session,", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock)", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert", "POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST',", "[]) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock()", "get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG,", "revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the", "mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock()", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active =", "sqlalchemy.orm import exc from taskflow.types import failure from octavia.common import", "= uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect", "self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock", "revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) 
mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "# Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert", "# Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the revert", "= PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock =", "repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id)", "provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE),", "mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid,", "mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) #", "utils from octavia.controller.worker.v2.tasks import database_tasks from octavia.db import repositories as", "= database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\",", "delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks. 
MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR),", "[redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners =", "super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session,", "test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock)", "mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID)", "'192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid()", "database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer()", "return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) #", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. 
MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "= Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id,", "vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock()", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB())", "= (database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test", "mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with", "mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with", "'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect =", "MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "'TEST', AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict)", "mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert", "'TEST', 
id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock)", "database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock)", "mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with", "= database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) # Test the", "@mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. 
MarkL7PolicyPendingDeleteInDB())", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB()", "octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks from octavia.db import", "= VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id =", "loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self,", "fix once provisioning status added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', #", "exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active =", "AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session,", "update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with(", "the # License for the specific language governing permissions and", "return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool", "mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self,", "repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid,", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db =", "mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE)", "provisioning_status=constants.ERROR) # Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT)", "self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "(database_tasks. 
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) #", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID})", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "= mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID", "self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mark_amp_pending_update_in_db = (database_tasks. MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) #", "repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock()", "(database_tasks. 
MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "'TEST', HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock)", "exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id)", "database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG,", "status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id)", "# Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with(", "mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def", "mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id)", "provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id,", "test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "= VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock =", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID)", "mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock()", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID)", 
"test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update", "mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID)", "def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock)", "under the License. # import random from cryptography import fernet", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate,", "LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test", "the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( #", "mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update')", "Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self,", "mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update,", "'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock()", "POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with exception mock_repos_pool_update.reset_mock()", "with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) 
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

import random

from cryptography import fernet
import mock
from oslo_db import exception as odb_exceptions
from oslo_utils import uuidutils
from sqlalchemy.orm import exc
from taskflow.types import failure

from octavia.common import constants
from octavia.common import data_models
from octavia.common import utils
from octavia.controller.worker.v2.tasks import database_tasks
from octavia.db import repositories as repo
import octavia.tests.unit.base as base

AMP_ID = uuidutils.generate_uuid()
COMPUTE_ID = uuidutils.generate_uuid()
LB_ID = uuidutils.generate_uuid()
SERVER_GROUP_ID = uuidutils.generate_uuid()
LB_NET_IP = '192.0.2.2'
LISTENER_ID = uuidutils.generate_uuid()
POOL_ID = uuidutils.generate_uuid()
HM_ID = uuidutils.generate_uuid()
MEMBER_ID = uuidutils.generate_uuid()
PORT_ID = uuidutils.generate_uuid()
SUBNET_ID = uuidutils.generate_uuid()
VRRP_PORT_ID = uuidutils.generate_uuid()
L7POLICY_ID = uuidutils.generate_uuid()
L7RULE_ID = uuidutils.generate_uuid()
VIP_IP = '192.168.127.12'
VRRP_IP = '172.16.31.10'
HA_IP = '192.168.3.11'
AMP_ROLE = 'FAKE_ROLE'
VRRP_ID = random.randrange(255)
VRRP_PRIORITY = random.randrange(100)
CACHED_ZONE = 'zone1'
IMAGE_ID = uuidutils.generate_uuid()
COMPUTE_FLAVOR = uuidutils.generate_uuid()

_amphora_mock = mock.MagicMock()
_amphora_mock.id = AMP_ID
_amphora_mock.compute_id = COMPUTE_ID
_amphora_mock.lb_network_ip = LB_NET_IP
_amphora_mock.vrrp_ip = VRRP_IP
_amphora_mock.ha_ip = HA_IP
_amphora_mock.role = AMP_ROLE
_amphora_mock.vrrp_id = VRRP_ID
_amphora_mock.vrrp_priority = VRRP_PRIORITY
_amphora_mock.vrrp_port_id = VRRP_PORT_ID

_compute_mock = mock.MagicMock()
_compute_mock.lb_network_ip = LB_NET_IP
_compute_mock.cached_zone = CACHED_ZONE
_compute_mock.image_id = IMAGE_ID
_compute_mock.compute_flavor = COMPUTE_FLAVOR

_listener_mock = mock.MagicMock()
_listener_to_dict_mock = mock.MagicMock(
    return_value={'id': LISTENER_ID})
_listener_mock.id = LISTENER_ID
_listener_mock.to_dict = _listener_to_dict_mock

_tf_failure_mock = mock.Mock(spec=failure.Failure)

_loadbalancer_mock = mock.MagicMock()
_loadbalancer_mock.id = LB_ID
_loadbalancer_mock.amphorae = [_amphora_mock]

_l7policy_mock = mock.MagicMock()
_l7policy_mock.id = L7POLICY_ID

_l7rule_mock = mock.MagicMock()
_l7rule_mock.id = L7RULE_ID

_vip_mock = mock.MagicMock()
_vip_mock.port_id = PORT_ID
_vip_mock.subnet_id = SUBNET_ID
_vip_mock.ip_address = VIP_IP

_vrrp_group_mock = mock.MagicMock()
_cert_mock = mock.MagicMock()


@mock.patch('octavia.db.repositories.AmphoraRepository.delete')
@mock.patch('octavia.db.repositories.AmphoraRepository.update')
@mock.patch('octavia.db.repositories.ListenerRepository.update')
@mock.patch('octavia.db.repositories.LoadBalancerRepository.update')
@mock.patch('octavia.db.api.get_session', return_value='TEST')
@mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG')
@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID)
class TestDatabaseTasks(base.TestCase):

    def setUp(self):

        self.health_mon_mock = mock.MagicMock()
        self.health_mon_mock.id = HM_ID
        self.health_mon_mock.pool_id = POOL_ID

        self.listener_mock = mock.MagicMock()
        self.listener_mock.id = LISTENER_ID

        self.loadbalancer_mock = mock.MagicMock()
        self.loadbalancer_mock.id = LB_ID

        self.member_mock = mock.MagicMock()
        self.member_mock.id = MEMBER_ID

        self.db_pool_mock = mock.MagicMock()
        self.db_pool_mock.id = POOL_ID
        self.db_pool_mock.health_monitor = self.health_mon_mock

        self.l7policy_mock = mock.MagicMock()
        self.l7policy_mock.id = L7POLICY_ID

        self.l7rule_mock = mock.MagicMock()
        self.l7rule_mock.id = L7RULE_ID
        self.l7rule_mock.l7policy = self.l7policy_mock

        super(TestDatabaseTasks, self).setUp()

    @mock.patch('octavia.db.repositories.AmphoraRepository.create',
                return_value=_amphora_mock)
    def test_create_amphora_in_db(self,
                                  mock_create,
                                  mock_generate_uuid,
                                  mock_LOG,
                                  mock_get_session,
                                  mock_loadbalancer_repo_update,
                                  mock_listener_repo_update,
                                  mock_amphora_repo_update,
                                  mock_amphora_repo_delete):

        create_amp_in_db = database_tasks.CreateAmphoraInDB()
        amp_id = create_amp_in_db.execute()

        repo.AmphoraRepository.create.assert_called_once_with(
            'TEST',
            id=AMP_ID,
            status=constants.PENDING_CREATE,
            cert_busy=False)

        self.assertEqual(_amphora_mock.id, amp_id)

        # Test the revert
        create_amp_in_db.revert(_tf_failure_mock)
        self.assertFalse(mock_amphora_repo_delete.called)

        mock_amphora_repo_delete.reset_mock()
        create_amp_in_db.revert(result='AMP')
        self.assertTrue(mock_amphora_repo_delete.called)
        mock_amphora_repo_delete.assert_called_once_with(
            'TEST', id='AMP')

        # Test revert with exception
        mock_amphora_repo_delete.reset_mock()
        mock_amphora_repo_delete.side_effect = Exception('fail')
        create_amp_in_db.revert(result='AMP')
        self.assertTrue(mock_amphora_repo_delete.called)
        mock_amphora_repo_delete.assert_called_once_with(
            'TEST', id='AMP')

    @mock.patch('octavia.db.repositories.ListenerRepository.delete')
    def test_delete_listener_in_db(self,
                                   mock_listener_repo_delete,
                                   mock_generate_uuid,
                                   mock_LOG,
                                   mock_get_session,
                                   mock_loadbalancer_repo_update,
                                   mock_listener_repo_update,
                                   mock_amphora_repo_update,
                                   mock_amphora_repo_delete):

        delete_listener = database_tasks.DeleteListenerInDB()
        delete_listener.execute({constants.LISTENER_ID: LISTENER_ID})

        repo.ListenerRepository.delete.assert_called_once_with(
            'TEST', id=LISTENER_ID)

        # Test the revert
        repo.ListenerRepository.delete.reset_mock()
        delete_listener.revert({constants.LISTENER_ID: LISTENER_ID})
        repo.ListenerRepository.delete.assert_not_called()

    @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')
    @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete')
    def test_delete_health_monitor_in_db(self,
                                         mock_health_mon_repo_delete,
                                         mock_health_mon_repo_update,
                                         mock_generate_uuid,
                                         mock_LOG,
                                         mock_get_session,
                                         mock_loadbalancer_repo_update,
                                         mock_listener_repo_update,
                                         mock_amphora_repo_update,
                                         mock_amphora_repo_delete):

        delete_health_mon = database_tasks.DeleteHealthMonitorInDB()
        delete_health_mon.execute(self.health_mon_mock)

        repo.HealthMonitorRepository.delete.assert_called_once_with(
            'TEST', id=HM_ID)

        # Test the revert
        mock_health_mon_repo_delete.reset_mock()
        delete_health_mon.revert(self.health_mon_mock)

        repo.HealthMonitorRepository.update.assert_called_once_with(
            'TEST', id=HM_ID, provisioning_status=constants.ERROR)

        # Test Not Found Exception
        mock_health_mon_repo_delete.reset_mock()
        mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()]
        delete_health_mon.execute(self.health_mon_mock)

        repo.HealthMonitorRepository.delete.assert_called_once_with(
            'TEST', id=HM_ID)

    def test_mark_amphora_booting_in_db(self,
                                        mock_generate_uuid,
                                        mock_LOG,
                                        mock_get_session,
                                        mock_loadbalancer_repo_update,
                                        mock_listener_repo_update,
                                        mock_amphora_repo_update,
                                        mock_amphora_repo_delete):

        mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB()
        mark_amp_booting_in_db.execute(_amphora_mock.id,
                                       _amphora_mock.compute_id)

        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            AMP_ID,
            status=constants.AMPHORA_BOOTING,
            compute_id=COMPUTE_ID)

        # Test the revert
        mock_amphora_repo_update.reset_mock()
        mark_amp_booting_in_db.revert(None, _amphora_mock.id,
                                      _amphora_mock.compute_id)

        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            AMP_ID,
            status=constants.ERROR,
            compute_id=COMPUTE_ID)

        # Test the revert with exception
        mock_amphora_repo_update.reset_mock()
        mock_amphora_repo_update.side_effect = Exception('fail')
        mark_amp_booting_in_db.revert(None, _amphora_mock.id,
                                      _amphora_mock.compute_id)

        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            AMP_ID,
            status=constants.ERROR,
            compute_id=COMPUTE_ID)

    def test_mark_listener_deleted_in_db(self,
                                         mock_generate_uuid,
                                         mock_LOG,
                                         mock_get_session,
                                         mock_loadbalancer_repo_update,
                                         mock_listener_repo_update,
                                         mock_amphora_repo_update,
                                         mock_amphora_repo_delete):

        mark_listener_deleted = database_tasks.MarkListenerDeletedInDB()
        mark_listener_deleted.execute(self.listener_mock)

        repo.ListenerRepository.update.assert_called_once_with(
            'TEST',
            LISTENER_ID,
            provisioning_status=constants.DELETED)

        # Test the revert
        mock_listener_repo_update.reset_mock()
        mark_listener_deleted.revert(self.listener_mock)

        repo.ListenerRepository.update.assert_called_once_with(
            'TEST',
            id=LISTENER_ID,
            provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.ListenerRepository.'
                'prov_status_active_if_not_error')
    def test_mark_lb_and_listeners_active_in_db(self,
                                                mock_list_not_error,
                                                mock_generate_uuid,
                                                mock_LOG,
                                                mock_get_session,
                                                mock_loadbalancer_repo_update,
                                                mock_listener_repo_update,
                                                mock_amphora_repo_update,
                                                mock_amphora_repo_delete):

        listener_dict = {constants.LISTENER_ID: LISTENER_ID,
                         constants.LOADBALANCER_ID: LB_ID}
        mark_lb_and_listeners_active = (database_tasks.
                                        MarkLBAndListenersActiveInDB())
        mark_lb_and_listeners_active.execute(LB_ID, [listener_dict])

        mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST',
            LB_ID,
            provisioning_status=constants.ACTIVE)

        # Test with LB_ID taken from the listeners
        mock_loadbalancer_repo_update.reset_mock()
        mock_list_not_error.reset_mock()

        mark_lb_and_listeners_active.execute(None, [listener_dict])

        mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST',
            LB_ID,
            provisioning_status=constants.ACTIVE)

        # Test with no LB_ID
        mock_loadbalancer_repo_update.reset_mock()
        mark_lb_and_listeners_active.execute(None, [])
        mock_loadbalancer_repo_update.assert_not_called()

        # Test the revert
        mock_loadbalancer_repo_update.reset_mock()
        mock_listener_repo_update.reset_mock()

        mark_lb_and_listeners_active.revert(LB_ID, [listener_dict])

        repo.ListenerRepository.update.assert_called_once_with(
            'TEST', id=LISTENER_ID,
            provisioning_status=constants.ERROR)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', id=LB_ID,
            provisioning_status=constants.ERROR)

        # Test the revert with no LB_ID
        mock_loadbalancer_repo_update.reset_mock()
        mock_listener_repo_update.reset_mock()

        mark_lb_and_listeners_active.revert(None, [])
        mock_loadbalancer_repo_update.assert_not_called()
        mock_listener_repo_update.assert_not_called()
id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST',", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks.", "= mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock =", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID =", "# Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail')", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)", "repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)])", "= (database_tasks. 
MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test", "# 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock()", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "LB_ID, name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get',", "mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls(", "def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get')", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock 
_loadbalancer_mock.listeners = [_listener_mock]", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB()", "# Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail')", "'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,", "with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy())", "@mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update,", "cryptography import fernet import mock from oslo_db import exception as", "AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "= database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id,", "database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert", "= (database_tasks. 
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test", "# Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None,", "'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update,", "database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test", "update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update,", "Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete')", "mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update')", "the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "mark_l7rule_pending_update = (database_tasks. 
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) #", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae)", "[mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock)", "mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member =", "software # distributed under the License is distributed on an", "(the \"License\"); you may # not use this file except", "Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG,", "mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect =", "uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid()", "revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails()", "mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer", "ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: 
LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'})", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with(", "@mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID,", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "= Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self,", "def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the", "(database_tasks. 
MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the", "listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 =", "exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group =", "def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name':", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 =", "id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the revert create_amp_in_db.revert(_tf_failure_mock)", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test", "= database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert", "base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid()", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, 
compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID)", "mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with", "= database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'})", "provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID,", "mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active =", "mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB()", "get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}],", "with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the", "repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)])", "# Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "_amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) 
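Reassembled into ordinary Python, the scaffold implied by those fragments looks roughly like the sketch below. The decorator stack, fixture IDs and the fake 'TEST' session value appear verbatim in the fragments; the setUp body is abridged and its exact shape is an assumption, so read this as an illustration rather than a verbatim copy of the original test file.

import mock
from oslo_utils import uuidutils

import octavia.tests.unit.base as base

# Stable IDs shared by all tests; uuidutils.generate_uuid is later patched so
# the tasks themselves see a deterministic AMP_ID.
AMP_ID = uuidutils.generate_uuid()
LB_ID = uuidutils.generate_uuid()
POOL_ID = uuidutils.generate_uuid()
HM_ID = uuidutils.generate_uuid()


@mock.patch('octavia.db.repositories.AmphoraRepository.delete')
@mock.patch('octavia.db.repositories.AmphoraRepository.update')
@mock.patch('octavia.db.repositories.ListenerRepository.update')
@mock.patch('octavia.db.repositories.LoadBalancerRepository.update')
@mock.patch('octavia.db.api.get_session', return_value='TEST')
@mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG')
@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID)
class TestDatabaseTasks(base.TestCase):

    def setUp(self):
        super(TestDatabaseTasks, self).setUp()
        # MagicMock stand-ins for the DB objects handed to each task
        # (abridged; the fragments show similar mocks for the listener,
        # member, L7 policy, L7 rule and amphora as well).
        self.health_mon_mock = mock.MagicMock()
        self.health_mon_mock.id = HM_ID
        self.loadbalancer_mock = mock.MagicMock()
        self.loadbalancer_mock.id = LB_ID
        self.db_pool_mock = mock.MagicMock()
        self.db_pool_mock.id = POOL_ID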
The remaining fragments cover the individual test methods of TestDatabaseTasks. Each test builds one task from database_tasks, among them the Mark*ActiveInDB, Mark*Pending{Create,Update,Delete}InDB and Mark*DeletedInDB variants for the load balancer, listener, pool, member, health monitor, L7 policy, L7 rule and amphora; the Update*InDB tasks (UpdateLoadbalancerInDB, UpdateListenerInDB, UpdatePoolInDB, UpdateMemberInDB, UpdateL7PolicyInDB, UpdateL7RuleInDB, UpdateHealthMonInDB, UpdateAmphoraInfo, UpdateAmphoraeVIPData, UpdateAmpFailoverDetails, UpdateVIPAfterAllocation, UpdateAmphoraDBCertExpiration, UpdateAmphoraCertBusyToFalse); the Delete*InDB tasks for listener, pool, member, health monitor (including DeleteHealthMonitorInDBByPool), L7 policy and L7 rule; and helpers such as CreateAmphoraInDB, MapLoadbalancerToAmphora, CreateVRRPGroupForLB, MarkLBAmphoraeHealthBusy, MarkLBAmphoraeDeletedInDB, GetAmphoraDetails, GetAmphoraeFromLoadbalancer, GetListenersFromLoadbalancer, GetVipFromLoadbalancer, ReloadAmphora and ReloadLoadBalancer. Every test calls execute() with the relevant mock object or ID and asserts that the patched repository method was called once with the fake 'TEST' session and the expected fields, typically a provisioning_status of ACTIVE, PENDING_CREATE, PENDING_UPDATE, PENDING_DELETE or DELETED (with operating_status=ONLINE for the active marks). The mocks are then reset and revert() is exercised, normally asserting provisioning_status=constants.ERROR; many tests repeat the revert with the repository raising Exception('fail') to confirm the error path, and a few revert assertions are left commented out behind TODO(johnsom) notes.
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test", "AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG,", "mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB()", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update')", "amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member", "None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def", "mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid,", "uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12'", "mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update')", "mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. 
MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE)", "# Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None)", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update,", "@mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG,", "the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE),", "delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock)", "AMP_ID, role=None, vrrp_priority=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect", "import repositories as repo import octavia.tests.unit.base as base AMP_ID =", "mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1,", "self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete", "id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() 
mark_health_mon_pending_delete.revert(self.health_mon_mock)", "# LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG,", "'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock()", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo()", "Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock)", "import data_models from octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks", "mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with", "implied. See the # License for the specific language governing", "provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect =", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') # Test the revert mock_listener_repo_update.reset_mock()", "uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID", "self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "# repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete,", "amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1, amp2]", "status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value':", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock)", "id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID,", "Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self,", "_l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID _listener_mock", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock)", "= uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID =", "= uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID =", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid,", "= Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def", "mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active =", "AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id,", "database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, 
repo.ListenerRepository.update.call_count) # Test", "update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) #", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session,", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock()", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG,", "@mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert", "[mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST',", "def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock()", "= reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def", "status=constants.AMPHORA_READY, 
compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "oslo_utils import uuidutils from sqlalchemy.orm import exc from taskflow.types import", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with(", "test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock", "revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the", "provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)])", "id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock()", "provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls(", "# under the License. # import random from cryptography import", "Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert", "revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST',", "'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) #", "= Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update,", "revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the", "= POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID,", "mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE)", "MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "= uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP =", "mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update')", "mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with", "= Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self,", "mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with", "provisioning_status=constants.ERROR) # TODO(johnsom) fix once provisioning status added # repo.HealthMonitorRepository.update.assert_called_once_with(", "provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR),", "provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST',", "mock_amphora_repo_update, 
mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id", "Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update,", "l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "# Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete')", "mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED,", "obtain # a copy of the License at # #", "listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete')", "'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool',", "Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE,", "self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock()", "_vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None)", "provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() 
mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with(", "revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR)", "[{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules =", "= map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id =", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB()", "ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert", "repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the", "= _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id =", "failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB()", "id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update,", "test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db =", "L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) 
repo.AmphoraRepository.update.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID)", "mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id,", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test", "test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip =", "provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update,", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR)", "self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description':", "= (database_tasks. 
MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test", "database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1,", "the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID)", "VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR", "LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock()", "side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY", "'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 
'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate,", "id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail')", "test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect", "repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active", "'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks.", "self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def", "map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None])", "@mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api')", "revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST',", "mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid,", "data_models from octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks from", "mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "= uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID =", "revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock,", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy =", "'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks.", "'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "_amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with(", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. 
[Overlapping n-gram fragments of the Octavia unit tests for octavia.controller.worker.v2.tasks.database_tasks (class TestDatabaseTasks; Apache-2.0 license header, module-level test constants such as AMP_ID, LB_ID, POOL_ID, LISTENER_ID generated with oslo_utils.uuidutils, and mock fixtures built in setUp). The fragments here and in the block below all follow one contract: each test stacks mock.patch over the relevant octavia.db.repositories method, octavia.db.api.get_session (returning the 'TEST' sentinel), database_tasks.LOG and generate_uuid; calls the task's execute(); asserts the repository call that moves the object to PENDING_CREATE, PENDING_UPDATE, PENDING_DELETE, ACTIVE or DELETED; then calls revert() and asserts the object is set to constants.ERROR, including when the repository itself raises. Tasks exercised include the Mark*PendingCreate/PendingUpdate/PendingDeleteInDB and Mark*ActiveInDB families for load balancers, listeners, pools, members, health monitors, L7 policies and L7 rules, plus CreateAmphoraInDB, ReloadAmphora, ReloadLoadBalancer, MapLoadbalancerToAmphora, AssociateFailoverAmphoraWithLBID, UpdateAmphoraVIPData, UpdateAmpFailoverDetails, UpdateAmphoraDBCertExpiration, UpdateAmphoraCertBusyToFalse, UpdateAmphoraInfo, the MarkAmphora Master/Backup/StandAlone/Ready/Allocated/Deleted/PendingDelete/PendingUpdate InDB variants, MarkLBAmphoraeDeletedInDB and CreateVRRPGroupForLB. A minimal sketch of the shared execute/revert pattern follows.]
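The fragments above repeat this contract many times, so a compact reconstruction is clearer than the raw n-grams. The sketch below is assembled from the MarkPoolPendingUpdateInDB fragments; the unittest.TestCase base, the reduced two-patch decorator stack and the test name are simplifications of the real class, which stacks seven mock.patch decorators (including LOG and generate_uuid) on Octavia's own test base class.

```python
# A minimal sketch, assuming only the two patches the assertions need.
import unittest

import mock
from oslo_utils import uuidutils

from octavia.common import constants
from octavia.controller.worker.v2.tasks import database_tasks

POOL_ID = uuidutils.generate_uuid()


@mock.patch('octavia.db.api.get_session', return_value='TEST')
@mock.patch('octavia.db.repositories.PoolRepository.update')
class TestMarkPoolPendingUpdateSketch(unittest.TestCase):

    def test_execute_and_revert(self, mock_pool_repo_update, mock_get_session):
        task = database_tasks.MarkPoolPendingUpdateInDB()

        # execute() moves the pool to PENDING_UPDATE through the repository.
        task.execute(POOL_ID)
        mock_pool_repo_update.assert_called_once_with(
            'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE)

        # revert() marks the pool ERROR so the failed flow is visible.
        mock_pool_repo_update.reset_mock()
        task.revert(POOL_ID)
        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)

        # The fragments also assert that a repository exception raised
        # during revert() does not propagate out of the task.
        mock_pool_repo_update.reset_mock()
        mock_pool_repo_update.side_effect = Exception('fail')
        task.revert(POOL_ID)
        mock_pool_repo_update.assert_called_once_with(
            'TEST', id=POOL_ID, provisioning_status=constants.ERROR)
```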
[The remaining fragments from the same test module cover the delete and update task variants (DeleteHealthMonitorInDBByPool, DeletePoolInDB, DeleteMemberInDB, DeleteListenerInDB, DeleteL7PolicyInDB, DeleteL7RuleInDB, UpdateHealthMonInDB, UpdatePoolInDB via Repositories.update_pool_and_sp, UpdateMemberInDB, UpdateListenerInDB, UpdateLoadbalancerInDB including its VIP/qos_policy_id update path, UpdateL7PolicyInDB, UpdateL7RuleInDB, UpdateLBServerGroupInDB, UpdateVIPAfterAllocation, UpdatePoolMembersOperatingStatusInDB), the amphora health-monitoring helpers (DisableLBAmphoraeHealthMonitoring, DisableAmphoraHealthMonitoring, marking amphora health rows busy=True), the GetVipFromLoadbalancer, GetListenersFromLoadbalancer and GetAmphoraeFromLoadbalancer accessors, and the MarkLBActiveInDB full-graph case, which cascades ACTIVE on execute and ERROR on revert across a load balancer's listeners, pools, members, health monitors, L7 policies and L7 rules. A second sketch, covering CreateAmphoraInDB and its conditional revert, follows.]
"mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "_amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert", "id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect", "# POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG,", "mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def" ]
[A second block of shredded n-grams follows, taken from a different file: a BeautifulSoup/pymysql scraping test module (imports time, requests, bs4, pymysql.cursors and unittest; class UnitTestsDataMinerYellowPagesLithuania) that mines business e-mail addresses from visalietuva.lt. Three tests are recoverable. test_extract_one_email fetches a single company page (https://www.visalietuva.lt/en/company/astorija-hotel-uab), looks for an a[itemprop=email] link and rewrites the address to "info@" plus its domain. test_extract_emails_from_all_page_of_results_for_one_activity_and_capital builds https://www.visalietuva.lt/en/search/<activity>/<city> URLs (hotel / vilniuje), derives the number of result pages from the '.search_count f_left' counter divided by 20 results per page, and visits every '.company_list' '.item' entry with a two-second pause between requests. test_extract_emails_from_all_page_of_results_for_all_activities_and_capitals repeats that loop over an activities list (ids 1-34, with only {'id': '1', 'url': 'labour'} left uncommented) and 54 Lithuanian municipalities (ids 183 'akmeneje' through 236 'zarasuose'), and inserts each (id_activite, id_capitale_du_monde, email) triple into the emails table of the contacts_professionnels MySQL database through pymysql, treating duplicate-key failures as "the record already exists". The n-gram list is truncated mid-entry at this point in the source.]
'sirvintose'},#Širvintos {'id': '225', 'nom': 'skuode'},#Skuodas {'id': '226', 'nom':", "'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19', 'url': 'electricity'},", "(%s, %s, %s)\" cursor.execute(sql, ( activite.get('id'), capitale.get('id'), email)) connection.commit() print(str(i_1)", "'search_count f_left'}) is not None: number_of_pages_with_coma = int(soup_search .find('div', {'class':", "+ email) else: print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity =", "= requests.get(url) # Parse the content of html_doc soup =", "+ \" An error with the email : \" +", "connection.close() except Exception as e: print(str(i_1) + \" An error", "is not None: number_of_pages_with_coma = int(soup_search .find('div', {'class': 'search_count f_left'})", "1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) # Request the", "activity = activite.get(\"url\") city = capitale.get(\"nom\") url_search = \"https://www.visalietuva.lt/en/search/\" +", "1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') # Request the content", "activites: try: activity = activite.get(\"url\") city = capitale.get(\"nom\") url_search =", "not None: for result_item in soup_of_one_page_of_results \\ .find('div', {'class': 'company_list'})", "'kelmeje'},#Kelmė {'id': '200', 'nom': 'klaipedoje'},#Klaipėda {'id': '201', 'nom': 'kretingoje'},#Kretinga {'id':", "number_of_pages += round(number_of_pages_with_coma) + 1 print('number_of_pages : ' + str(number_of_pages))", "{'id': '212', 'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė {'id': '214',", ": \" + email) connection.close() except: print(str(i_1) + \" The", "# Parse the content of html_doc soup_result = BeautifulSoup(html_result.content, 'html.parser')", "else: print('sorry there is nothing') except Exception as e: print(\"There", "\"info@\" + \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect(", "'electricity'}, #{'id': '20', 'url': 'plumbing'}, #{'id': '21', 'url': 'security'}, #{'id':", "content of a page from the url html = requests.get(url)", "'salcininkuose'},#Šalčininkai {'id': '221', 'nom': 'siauliuose'},#Šiauliai {'id': '222', 'nom': 'silaleje'},#Šilalė {'id':", "print(str(i_1) + ' no email business') else: print('sorry there is", "'10', 'url': 'restaurant'}, #{'id': '11', 'url': 'building'}, #{'id': '12', 'url':", "the content of html_doc soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a',", "'skuode'},#Skuodas {'id': '226', 'nom': 'svencionyse'},#Švenčionys {'id': '227', 'nom': 'taurageje'},#Tauragė {'id':", "is not None: for result_item in soup_of_one_page_of_results\\ .find('div', {'class': 'company_list'})\\", "time.sleep(2) # Request the content of a page from the", "= requests.get(url_of_one_page_of_results) soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser') if soup_of_one_page_of_results.find('div', {'class': 'company_list'})", "'185', 'nom': 'anyksciuose'},#Anykščiai {'id': '186', 'nom': 'birstone'},#Birštonas {'id': '187', 'nom':", "'203', 'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom':", "soup_search.find('div', {'class': 'search_count f_left'}) is not None: number_of_pages_with_coma = int(soup_search", "{'id': '186', 'nom': 'birstone'},#Birštonas {'id': '187', 'nom': 'birzuose'},#Biržai {'id': '188',", 
"'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai {'id':", "db='contacts_professionnels', charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor ) with connection.cursor() as cursor: try: sql", "{'id': '221', 'nom': 'siauliuose'},#Šiauliai {'id': '222', 'nom': 'silaleje'},#Šilalė {'id': '223',", "email) else: print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\"", "Parse the content of html_doc soup_result = BeautifulSoup(html_result.content, 'html.parser') if", "'nom': 'sirvintose'},#Širvintos {'id': '225', 'nom': 'skuode'},#Skuodas {'id': '226', 'nom': 'svencionyse'},#Švenčionys", "{'class': 'search_count f_left'}) .find('span').text ) / 20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) <", "soup_result.find('a', {'itemprop': 'email'}) is not None: email = \"info@\" +", "{'id': '232', 'nom': 'varenoje'},#Varėna {'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis {'id': '234',", "'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306, user='', password='', db='contacts_professionnels',", "{'id': '1', 'url': 'labour'} #{'id': '2', 'url': 'real+estate'}, #{'id': '3',", "unittest class UnitTestsDataMinerYellowPagesLithuania(unittest.TestCase): def test_extract_one_email(self): url = \"https://www.visalietuva.lt/en/company/astorija-hotel-uab\" # Request", "'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai {'id': '206', 'nom': 'moletuose'},#Molėtai", "'28', 'url': 'store'}, #{'id': '29', 'url': 'notary'}, #{'id': '30', 'url':", "'208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210', 'nom':", ".find('span').text )/20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages += round(number_of_pages_with_coma) +", "'31', 'url': 'tailor'}, #{'id': '32', 'url': 'meat'}, #{'id': '33', 'url':", "{'id': '234', 'nom': 'vilniuje'},#Vilnius {'id': '235', 'nom': 'visagine'},#Visaginas {'id': '236',", ".find('div', {'class': 'company_list'})\\ .find_all('div', {'class': 'item'}): i_1 += 1 url_result", "'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis", "of html_doc soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop': 'email'})", "#{'id': '5', 'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id': '7',", "'url': 'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id': '20', 'url': 'plumbing'},", "email = \"info@\" + \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection", "'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom': 'kaune'},#Kaunas {'id':", "{'id': '236', 'nom': 'zarasuose'}#Zarasai ] try: for capitale in capitales_du_monde:", "'186', 'nom': 'birstone'},#Birštonas {'id': '187', 'nom': 'birzuose'},#Biržai {'id': '188', 'nom':", "print(str(i_1) + \" An error with the email : \"", "+= 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) # Request", "record already exists : \" + email) connection.close() except Exception", "business') else: print('sorry there is nothing') if number_of_pages > 1:", "None: number_of_pages_with_coma = int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text )", "= int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text 
) / 20", "if number_of_pages > 1: for i in range(2, number_of_pages +", "{'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai {'id': '221',", "= requests.get(url_result) # Parse the content of html_doc soup_result =", "'189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom':", "= capitale.get(\"nom\") url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" +", "test_extract_one_email(self): url = \"https://www.visalietuva.lt/en/company/astorija-hotel-uab\" # Request the content of a", "'nom': 'ukmergeje'},#Ukmergė {'id': '231', 'nom': 'utenoje'},#Utena {'id': '232', 'nom': 'varenoje'},#Varėna", "< 5: number_of_pages += round(number_of_pages_with_coma) + 1 print('number_of_pages : '", "{'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191',", "'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id':", "'nom': 'birstone'},#Birštonas {'id': '187', 'nom': 'birzuose'},#Biržai {'id': '188', 'nom': 'druskininkuose'},#Druskininkai", "\"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city html_search = requests.get(url_search)", "city = capitale.get(\"nom\") url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\"", "e: print(str(i_1) + \" An error with the email :", "'nom': 'radviliskyje'},#Radviliškis {'id': '216', 'nom': 'raseiniuose'},#Raseiniai {'id': '217', 'nom': 'rietave'},#Rietavas", "'url': 'labour'} #{'id': '2', 'url': 'real+estate'}, #{'id': '3', 'url': 'recruitment'},", "{'id': '217', 'nom': 'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219',", "BeautifulSoup import requests import pymysql.cursors import unittest class UnitTestsDataMinerYellowPagesLithuania(unittest.TestCase): def", "'recruitment'}, #{'id': '4', 'url': 'software'}, #{'id': '5', 'url': 'hotel'}, #{'id':", "#{'id': '23', 'url': 'bank'}, #{'id': '24', 'url': 'garage'}, #{'id': '25',", "str(number_of_pages)) elif int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >= 5: number_of_pages += round(number_of_pages_with_coma) print('number_of_pages :", "stored : \" + email) connection.close() except: print(str(i_1) + \"", "'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai", "'183', 'nom': 'akmeneje'},#Akmenė {'id': '184', 'nom': 'alytuje'},#Alytus {'id': '185', 'nom':", "Exception as e: print(\"There is an error connection at url", "soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop': 'email'}) is not", "{'class': 'item'}): i_1 += 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')", "#{'id': '19', 'url': 'electricity'}, #{'id': '20', 'url': 'plumbing'}, #{'id': '21',", "' + str(number_of_pages)) elif int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >= 5: number_of_pages += round(number_of_pages_with_coma)", "= pymysql.connect( host='localhost', port=3306, user='', password='', db='contacts_professionnels', charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor )", "'21', 'url': 'security'}, #{'id': '22', 'url': 'lawyer'}, #{'id': '23', 'url':", "def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\" city = \"vilniuje\" url_search =", "'tailor'}, #{'id': '32', 'url': 'meat'}, #{'id': '33', 'url': 'library'}, #{'id':", "'nom': 'joniskyje'},#Joniškis {'id': '193', 'nom': 
'jurbarke'},#Jurbarkas {'id': '194', 'nom': 'kaisiadoryse'},#Kaišiadorys", "as e: print(\"There is an error connection at url :", "the url html_result = requests.get(url_result) # Parse the content of", ")/20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages += round(number_of_pages_with_coma) + 1", "{'id': '201', 'nom': 'kretingoje'},#Kretinga {'id': '202', 'nom': 'kupiskyje'},#Kupiškis {'id': '203',", "soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no", "record is stored : \" + email) connection.close() except: print(str(i_1)", "import BeautifulSoup import requests import pymysql.cursors import unittest class UnitTestsDataMinerYellowPagesLithuania(unittest.TestCase):", "result_item in soup_of_one_page_of_results\\ .find('div', {'class': 'company_list'})\\ .find_all('div', {'class': 'item'}): i_1", "a page from the url html = requests.get(url) # Parse", "'url': 'notary'}, #{'id': '30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'},", "{'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai {'id': '206', 'nom': 'moletuose'},#Molėtai {'id': '207',", "'nom': 'visagine'},#Visaginas {'id': '236', 'nom': 'zarasuose'}#Zarasai ] try: for capitale", "Request the content of a page from the url html", "( activite.get('id'), capitale.get('id'), email)) connection.commit() print(str(i_1) + \" The record", "if soup_search.find('div', {'class': 'search_count f_left'}) is not None: number_of_pages_with_coma =", "already exists : \" + email) connection.close() except Exception as", "#{'id': '7', 'url': 'cleaning'}, #{'id': '8', 'url': 'association'}, #{'id': '9',", "{'id': '192', 'nom': 'joniskyje'},#Joniškis {'id': '193', 'nom': 'jurbarke'},#Jurbarkas {'id': '194',", "'194', 'nom': 'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom':", "+ \"/\" + str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results = requests.get(url_of_one_page_of_results) soup_of_one_page_of_results", "{'id': '198', 'nom': 'kedainiuose'},#Kėdainiai {'id': '199', 'nom': 'kelmeje'},#Kelmė {'id': '200',", "the content of a page from the url html =", ".find_all('div', {'class': 'item'}): i_1 += 1 url_result = result_item.find('a', {'class':", "'32', 'url': 'meat'}, #{'id': '33', 'url': 'library'}, #{'id': '34', 'url':", "#{'id': '30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'}, #{'id': '32',", "\" + email + \" \" + str(e)) else: print(str(i_1)", "{'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom': 'kaune'},#Kaunas {'id': '197',", "#{'id': '18', 'url': 'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id': '20',", "'7', 'url': 'cleaning'}, #{'id': '8', 'url': 'association'}, #{'id': '9', 'url':", "'nom': 'kaune'},#Kaunas {'id': '197', 'nom': 'kazlu-rudoje'},#Kazlų Rūda {'id': '198', 'nom':", "#{'id': '32', 'url': 'meat'}, #{'id': '33', 'url': 'library'}, #{'id': '34',", "'9', 'url': 'financial'}, #{'id': '10', 'url': 'restaurant'}, #{'id': '11', 'url':", "'33', 'url': 'library'}, #{'id': '34', 'url': 'architect'} ] capitales_du_monde =", "'24', 'url': 'garage'}, #{'id': '25', 'url': 'dentist'}, #{'id': '26', 'url':", "#{'id': '26', 'url': 'doctor'}, #{'id': '27', 'url': 'accounting'}, #{'id': '28',", "#{'id': '34', 'url': 'architect'} ] capitales_du_monde = [ {'id': '183',", "'nom': 'palangoje'},#Palanga {'id': '211', 'nom': 'panevezyje'},#Panevėžys {'id': '212', 'nom': 'pasvalyje'},#Pasvalys", "'nom': 'kazlu-rudoje'},#Kazlų Rūda {'id': '198', 'nom': 
'kedainiuose'},#Kėdainiai {'id': '199', 'nom':", "{'id': '207', 'nom': 'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209',", "'200', 'nom': 'klaipedoje'},#Klaipėda {'id': '201', 'nom': 'kretingoje'},#Kretinga {'id': '202', 'nom':", "+ \" The record is stored : \" + email)", "str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results = requests.get(url_of_one_page_of_results) soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser')", "+ ' email : ' + email) else: print(str(i_1) +", "{'id': '224', 'nom': 'sirvintose'},#Širvintos {'id': '225', 'nom': 'skuode'},#Skuodas {'id': '226',", "#{'id': '17', 'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19',", "'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė {'id': '214', 'nom': 'prienuose'},#Prienai", "'html.parser') if soup.find('a', {'itemprop': 'email'}) is not None: email =", "'company_list'}) is not None: for result_item in soup_of_one_page_of_results \\ .find('div',", "'financial'}, #{'id': '10', 'url': 'restaurant'}, #{'id': '11', 'url': 'building'}, #{'id':", "'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai {'id': '206', 'nom': 'moletuose'},#Molėtai {'id':", "'url': 'garage'}, #{'id': '25', 'url': 'dentist'}, #{'id': '26', 'url': 'doctor'},", "#{'id': '24', 'url': 'garage'}, #{'id': '25', 'url': 'dentist'}, #{'id': '26',", "business') else: print('sorry there is nothing') except Exception as e:", "'1', 'url': 'labour'} #{'id': '2', 'url': 'real+estate'}, #{'id': '3', 'url':", "= \"info@\" + \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection =", "BeautifulSoup(html_result.content, 'html.parser') if soup_result.find('a', {'itemprop': 'email'}) is not None: email", "'nom': 'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis", "'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai {'id': '230', 'nom': 'ukmergeje'},#Ukmergė {'id':", "the url html = requests.get(url) # Parse the content of", "'23', 'url': 'bank'}, #{'id': '24', 'url': 'garage'}, #{'id': '25', 'url':", "else: print('sorry there is nothing') if number_of_pages > 1: for", "'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id':", "in range(2, number_of_pages+1): url_of_one_page_of_results = url_search + \"/\" + str(i)", "UnitTestsDataMinerYellowPagesLithuania(unittest.TestCase): def test_extract_one_email(self): url = \"https://www.visalietuva.lt/en/company/astorija-hotel-uab\" # Request the content", ": ' + email) else: print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self):", "'kazlu-rudoje'},#Kazlų Rūda {'id': '198', 'nom': 'kedainiuose'},#Kėdainiai {'id': '199', 'nom': 'kelmeje'},#Kelmė", "{'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no email", "\\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306,", "{'id': '188', 'nom': 'druskininkuose'},#Druskininkai {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190',", "content of html_doc soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop':", "content of html_doc soup_result = BeautifulSoup(html_result.content, 'html.parser') if soup_result.find('a', {'itemprop':", "= [ # {'id': '1', 'url': 'labour'} #{'id': '2', 
'url':", "#{'id': '21', 'url': 'security'}, #{'id': '22', 'url': 'lawyer'}, #{'id': '23',", "is not None: print(url_search) for result_item in soup_search \\ .find('div',", "1: for i in range(2, number_of_pages+1): url_of_one_page_of_results = url_search +", "email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\" city = \"vilniuje\"", "{'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210',", "BeautifulSoup(html_search.content, 'html.parser') number_of_pages = 0 if soup_search.find('div', {'class': 'search_count f_left'})", "\"`id_capitale_du_monde`, \" \\ \"`email`) VALUE (%s, %s, %s)\" cursor.execute(sql, (", "is not None: email = \"info@\" + \\ soup_result.find('a', {'itemprop':", "'search_count f_left'}) .find('span').text )/20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages +=", "of html_doc soup_result = BeautifulSoup(html_result.content, 'html.parser') if soup_result.find('a', {'itemprop': 'email'})", "#{'id': '14', 'url': 'locksmith'}, #{'id': '15', 'url': 'bakery'}, #{'id': '16',", "'accounting'}, #{'id': '28', 'url': 'store'}, #{'id': '29', 'url': 'notary'}, #{'id':", "cursor.execute(sql, ( activite.get('id'), capitale.get('id'), email)) connection.commit() print(str(i_1) + \" The", "'195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom': 'kaune'},#Kaunas {'id': '197', 'nom':", "else: print(str(i_1) + ' no email business') else: print('sorry there", "page from the url html = requests.get(url) # Parse the", "= BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop': 'email'}) is not None:", "'211', 'nom': 'panevezyje'},#Panevėžys {'id': '212', 'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom':", "requests import pymysql.cursors import unittest class UnitTestsDataMinerYellowPagesLithuania(unittest.TestCase): def test_extract_one_email(self): url", "'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai", "+ 1 print('number_of_pages : ' + str(number_of_pages)) elif int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >=", "'radviliskyje'},#Radviliškis {'id': '216', 'nom': 'raseiniuose'},#Raseiniai {'id': '217', 'nom': 'rietave'},#Rietavas {'id':", "html_of_one_page_of_results = requests.get(url_of_one_page_of_results) soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser') if soup_of_one_page_of_results.find('div', {'class':", "None: print(url_search) for result_item in soup_search \\ .find('div', {'class': 'company_list'})", "#{'id': '4', 'url': 'software'}, #{'id': '5', 'url': 'hotel'}, #{'id': '6',", "# {'id': '1', 'url': 'labour'} #{'id': '2', 'url': 'real+estate'}, #{'id':", "'25', 'url': 'dentist'}, #{'id': '26', 'url': 'doctor'}, #{'id': '27', 'url':", "activity + \"/\" + city html_search = requests.get(url_search) soup_search =", "page from the url html_result = requests.get(url_result) # Parse the", "'207', 'nom': 'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom':", "the email : \" + email + \" \" +", "'206', 'nom': 'moletuose'},#Molėtai {'id': '207', 'nom': 'neringoje'},#Neringa {'id': '208', 'nom':", "for result_item in soup_search \\ .find('div', {'class': 'company_list'}) \\ .find_all('div',", "'url': 'association'}, #{'id': '9', 'url': 'financial'}, #{'id': '10', 'url': 'restaurant'},", "= BeautifulSoup(html_of_one_page_of_results.content, 
'html.parser') if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None:", "if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages += round(number_of_pages_with_coma) + 1 print('number_of_pages", "= \"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" +", "activity = \"hotel\" city = \"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" +", "'url': 'insurance'}, #{'id': '17', 'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'},", "/ 20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages += round(number_of_pages_with_coma) +", "\"`email`) VALUE (%s, %s, %s)\" cursor.execute(sql, ( activite.get('id'), capitale.get('id'), email))", "\"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city", "'nom': 'taurageje'},#Tauragė {'id': '228', 'nom': 'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai", "0 if soup_search.find('div', {'class': 'search_count f_left'}) is not None: number_of_pages_with_coma", "capitale.get(\"nom\") url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city", "'nom': 'zarasuose'}#Zarasai ] try: for capitale in capitales_du_monde: for activite", "= \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) + ' email", "for result_item in soup_of_one_page_of_results \\ .find('div', {'class': 'company_list'}) \\ .find_all('div',", "url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') # Request the content of", "'panevezyje'},#Panevėžys {'id': '212', 'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė {'id':", "'220', 'nom': 'salcininkuose'},#Šalčininkai {'id': '221', 'nom': 'siauliuose'},#Šiauliai {'id': '222', 'nom':", "1): url_of_one_page_of_results = url_search + \"/\" + str(i) print(url_of_one_page_of_results) time.sleep(2)", "'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210', 'nom': 'palangoje'},#Palanga", "{'id': '214', 'nom': 'prienuose'},#Prienai {'id': '215', 'nom': 'radviliskyje'},#Radviliškis {'id': '216',", "{'id': '223', 'nom': 'siluteje'},#Šilutė {'id': '224', 'nom': 'sirvintose'},#Širvintos {'id': '225',", "+ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306,", "{'class': 'search_count f_left'}) is not None: number_of_pages_with_coma = int(soup_search .find('div',", "email business') else: print('sorry there is nothing') if number_of_pages >", "{'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192',", "1: for i in range(2, number_of_pages + 1): url_of_one_page_of_results =", "+ soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else:", "+= round(number_of_pages_with_coma) + 1 print('number_of_pages : ' + str(number_of_pages)) elif", "in soup_search \\ .find('div', {'class': 'company_list'}) \\ .find_all('div', {'class': 'item'}):", "'url': 'restaurant'}, #{'id': '11', 'url': 'building'}, #{'id': '12', 'url': 'hairdresser'},", "'plungeje'},#Plungė {'id': '214', 'nom': 'prienuose'},#Prienai {'id': '215', 'nom': 'radviliskyje'},#Radviliškis {'id':", "from the url html_result = requests.get(url_result) # Parse the content", "'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id': '20', 'url': 'plumbing'}, #{'id':", "'url': 'financial'}, #{'id': '10', 'url': 'restaurant'}, #{'id': '11', 'url': 
'building'},", "'nom': 'kedainiuose'},#Kėdainiai {'id': '199', 'nom': 'kelmeje'},#Kelmė {'id': '200', 'nom': 'klaipedoje'},#Klaipėda", "'locksmith'}, #{'id': '15', 'url': 'bakery'}, #{'id': '16', 'url': 'insurance'}, #{'id':", "'software'}, #{'id': '5', 'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id':", "int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text )/20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) <", "= result_item.find('a', {'class': 'company-item-title'}).get('href') # Request the content of a", "'url': 'landlord'}, #{'id': '7', 'url': 'cleaning'}, #{'id': '8', 'url': 'association'},", "'druskininkuose'},#Druskininkai {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id':", ": \" + email + \" \" + str(e)) else:", "#{'id': '6', 'url': 'landlord'}, #{'id': '7', 'url': 'cleaning'}, #{'id': '8',", "'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id':", "'nom': 'akmeneje'},#Akmenė {'id': '184', 'nom': 'alytuje'},#Alytus {'id': '185', 'nom': 'anyksciuose'},#Anykščiai", "from the url html = requests.get(url) # Parse the content", "activite.get('id'), capitale.get('id'), email)) connection.commit() print(str(i_1) + \" The record is", "{'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220',", "'kretingoje'},#Kretinga {'id': '202', 'nom': 'kupiskyje'},#Kupiškis {'id': '203', 'nom': 'lazdijuose'},#Lazdijai {'id':" ]
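The page-count arithmetic in the tests above divides the hit count from the 'search_count f_left' span by 20 and then inspects the first decimal digit as a string to decide how to round. A minimal sketch of the same calculation using math.ceil, assuming 20 results per page as the code above does; the standalone helper name is illustrative and not part of the original test class:

import math

from bs4 import BeautifulSoup


def number_of_result_pages(search_html, results_per_page=20):
    # Read the total number of hits from the same 'search_count f_left' span
    # the tests use, then round up to a whole number of result pages.
    soup = BeautifulSoup(search_html, 'html.parser')
    count_div = soup.find('div', {'class': 'search_count f_left'})
    if count_div is None:
        return 0
    total_results = int(count_div.find('span').text)
    return math.ceil(total_results / results_per_page)

Rounding up directly avoids the string-splitting of the decimal part and the off-by-one branch the original logic needs.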
[ "dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance import DagsterInstance, InstanceRef from dagster.utils.test", "runs: instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir):", "the runs.db has moved and otherwise we would have to", "old runs are simply invisible, and their # presence won't", "run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out", "== ['89296095-892d-4a15-aa0d-9018d1580945'] instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))", "instance def test_0_6_4(): test_dir = file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir): instance", "run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is", "instance migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__,", "the upgrade # method. assert len(runs) == 0 run_ids =", "this only in the upgrade # method. assert len(runs) ==", "'567bc23fd1ac. Please run `dagster instance migrate`.' ), ): for run", "== ['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of", "runs. Instead, we'll do this only in the upgrade #", "`dagster instance migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir =", "choice -- old runs are simply invisible, and their #", "reasonable choice since # the runs.db has moved and otherwise", "check for the existence of an # old runs.db every", "DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of date and must be", "the existence of an # old runs.db every time we", "every time we accessed the runs. Instead, we'll do this", "instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs() assert len(runs) ==", "at revision None, head is ' '567bc23fd1ac. Please run `dagster", "with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with pytest.raises(", "a check for the existence of an # old runs.db", "their # presence won't raise DagsterInstanceMigrationRequired. 
This is a reasonable", "and otherwise we would have to do a check for", "since # the runs.db has moved and otherwise we would", "an old instance def test_0_6_4(): test_dir = file_relative_path(__file__, 'snapshot_0_6_4') with", "with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs() assert", "# test that we can load runs and events from", "file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs()", "= instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape(", "import os import re import pytest from dagster import file_relative_path", "test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))", "the runs. Instead, we'll do this only in the upgrade", "won't raise DagsterInstanceMigrationRequired. This is a reasonable choice since #", "have to do a check for the existence of an", "raise DagsterInstanceMigrationRequired. This is a reasonable choice since # the", "dagster.utils.test import restore_directory # test that we can load runs", "# presence won't raise DagsterInstanceMigrationRequired. This is a reasonable choice", "file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance import DagsterInstance, InstanceRef", "= DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs() assert len(runs) == 1", "and must be migrated (SqliteEventLogStorage for run ' '89296095-892d-4a15-aa0d-9018d1580945). Database", "an # old runs.db every time we accessed the runs.", "= file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs =", "old instance def test_0_6_4(): test_dir = file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir):", "runs = instance.get_runs() assert len(runs) == 1 run_ids = instance._event_storage.get_all_run_ids()", "os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs =", "Please run `dagster instance migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate():", "with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() # Note", "in runs: instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with", "dagster import file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance import", "def test_0_6_4(): test_dir = file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir): instance =", "run ' '89296095-892d-4a15-aa0d-9018d1580945). 
Database is at revision None, head is", "'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir):", "head is ' '567bc23fd1ac. Please run `dagster instance migrate`.' ),", "instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() # Note that this", "assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is", "events from an old instance def test_0_6_4(): test_dir = file_relative_path(__file__,", "'snapshot_0_6_6/sqlite/runs.db')) assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))", "instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))", "in the upgrade # method. assert len(runs) == 0 run_ids", "migrated (SqliteEventLogStorage for run ' '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision", "), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert", "a reasonable choice since # the runs.db has moved and", "len(runs) == 0 run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']", "run in runs: instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')", "'567bc23fd1ac. Please run `dagster instance migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def", "1 run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') assert", "load runs and events from an old instance def test_0_6_4():", "test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs", "pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of date and must", "do this only in the upgrade # method. 
assert len(runs)", "and events from an old instance def test_0_6_4(): test_dir =", "file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with", "def test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance =", "0 run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises(", "re import pytest from dagster import file_relative_path from dagster.core.errors import", "-- old runs are simply invisible, and their # presence", "run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') assert not", "DagsterInstanceMigrationRequired from dagster.core.instance import DagsterInstance, InstanceRef from dagster.utils.test import restore_directory", "from dagster.utils.test import restore_directory # test that we can load", "instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance", "be migrated (SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at", "we'll do this only in the upgrade # method. assert", "assert len(runs) == 1 run_ids = instance._event_storage.get_all_run_ids() assert run_ids ==", "from dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance import DagsterInstance, InstanceRef from", "`dagster instance migrate`.' ), ): for run in runs: instance.all_logs(run.run_id)", "simply invisible, and their # presence won't raise DagsterInstanceMigrationRequired. This", "invisible, and their # presence won't raise DagsterInstanceMigrationRequired. This is", "existence of an # old runs.db every time we accessed", "upgrade # method. assert len(runs) == 0 run_ids = instance._event_storage.get_all_run_ids()", "is out of date and must be migrated (SqliteEventLogStorage for", "(SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None,", "Note that this is a deliberate choice -- old runs", "this is a deliberate choice -- old runs are simply", "file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs()", "): for run in runs: instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc(): test_dir =", "otherwise we would have to do a check for the", "not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs", "import file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance import DagsterInstance,", "= instance.get_runs() assert len(runs) == 1 run_ids = instance._event_storage.get_all_run_ids() assert", "# old runs.db every time we accessed the runs. 
Instead,", "test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert not", "): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__,", "Database is at revision None, head is ' '567bc23fd1ac. Please", "DagsterInstanceMigrationRequired. This is a reasonable choice since # the runs.db", "import pytest from dagster import file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired", "This is a reasonable choice since # the runs.db has", "migrated (SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision", "presence won't raise DagsterInstanceMigrationRequired. This is a reasonable choice since", "test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert not os.path.exists(file_relative_path(__file__,", "runs are simply invisible, and their # presence won't raise", "is a reasonable choice since # the runs.db has moved", "# Note that this is a deliberate choice -- old", "that this is a deliberate choice -- old runs are", "restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with pytest.raises( DagsterInstanceMigrationRequired,", "has moved and otherwise we would have to do a", "run `dagster instance migrate`.' ), ): for run in runs:", "restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() # Note that", "import DagsterInstance, InstanceRef from dagster.utils.test import restore_directory # test that", "assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir): instance", "assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade()", "migrate`.' 
), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')", "from dagster.core.instance import DagsterInstance, InstanceRef from dagster.utils.test import restore_directory #", "= file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs =", "['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of date", "import DagsterInstanceMigrationRequired from dagster.core.instance import DagsterInstance, InstanceRef from dagster.utils.test import", "are simply invisible, and their # presence won't raise DagsterInstanceMigrationRequired.", "'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs()", "os import re import pytest from dagster import file_relative_path from", "from dagster import file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance", "is at revision None, head is ' '567bc23fd1ac. Please run", "= DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() # Note that this is", "Instead, we'll do this only in the upgrade # method.", "out of date and must be migrated (SqliteEventLogStorage for run", "(SqliteEventLogStorage for run ' '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None,", "Please run `dagster instance migrate`.' ), ): for run in", "restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs() assert len(runs)", "we can load runs and events from an old instance", "instance.upgrade() runs = instance.get_runs() assert len(runs) == 1 run_ids =", "runs.db has moved and otherwise we would have to do", "'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() #", "instance.get_runs() assert len(runs) == 1 run_ids = instance._event_storage.get_all_run_ids() assert run_ids", "match=re.escape( 'Instance is out of date and must be migrated", "with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of date and", "deliberate choice -- old runs are simply invisible, and their", "for run ' '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head", "= instance.get_runs() # Note that this is a deliberate choice", "DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is", "and must be migrated (SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database", "restore_directory # test that we can load runs and events", "test that we can load runs and events from an", "run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired,", "we accessed the runs. 
# pylint: disable=protected-access
import os
import re

import pytest

from dagster import file_relative_path
from dagster.core.errors import DagsterInstanceMigrationRequired
from dagster.core.instance import DagsterInstance, InstanceRef
from dagster.utils.test import restore_directory


# test that we can load runs and events from an old instance
def test_0_6_4():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_4')
    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        runs = instance.get_runs()
        with pytest.raises(
            DagsterInstanceMigrationRequired,
            match=re.escape(
                'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
                'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is '
                '567bc23fd1ac. Please run `dagster instance migrate`.'
            ),
        ):
            for run in runs:
                instance.all_logs(run.run_id)


def test_0_6_6_sqlite_exc():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        runs = instance.get_runs()
        # Note that this is a deliberate choice -- old runs are simply invisible, and their
        # presence won't raise DagsterInstanceMigrationRequired. This is a deliberate choice since
        # the runs.db has moved and otherwise we would have to do a check for the existence of an
        # old runs.db every time we accessed the runs. Instead, we'll do this only in the upgrade
        # method.
        assert len(runs) == 0

        run_ids = instance._event_storage.get_all_run_ids()
        assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']

        with pytest.raises(
            DagsterInstanceMigrationRequired,
            match=re.escape(
                'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
                '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is '
                '567bc23fd1ac. Please run `dagster instance migrate`.'
            ),
        ):
            instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')


def test_0_6_6_sqlite_migrate():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
    assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
    assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))

    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        instance.upgrade()

        runs = instance.get_runs()
        assert len(runs) == 1

        run_ids = instance._event_storage.get_all_run_ids()
        assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']

        instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')

        assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
        assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))
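These back-compat tests mutate their snapshot directories in place (most obviously via instance.upgrade()), so they rely on dagster.utils.test.restore_directory to put the fixture back afterwards. As a rough illustration only, assuming that helper behaves like a snapshot-and-restore context manager, a stand-in could look like the sketch below; the name restore_directory_sketch and its body are hypothetical and are not dagster's implementation.

import contextlib
import os
import shutil
import tempfile


@contextlib.contextmanager
def restore_directory_sketch(src):
    # Snapshot the pristine fixture directory before the test runs.
    with tempfile.TemporaryDirectory() as backup:
        backup_copy = os.path.join(backup, os.path.basename(src))
        shutil.copytree(src, backup_copy)
        try:
            yield src
        finally:
            # Discard whatever the test wrote and restore the original snapshot.
            shutil.rmtree(src, ignore_errors=True)
            shutil.copytree(backup_copy, src)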
[ "= df.groupby(['CATEGORY']).size().to_frame('Crimes') labels = ['Trespassing', 'Vehicle theft', 'General Theft', 'Damage", "/ len(df) grouped = grouped.drop(columns='Size') grouped.index = grouped.index.astype(int) p =", "By Year') p.get_legend().remove() plt.savefig('../charts/year.png') def group_by_territory(df): grouped = df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage']", "p.set_title('Crimes Percentage Grouped By Year') p.get_legend().remove() plt.savefig('../charts/year.png') def group_by_territory(df): grouped", "= 100 * grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') grouped.index", "plt.savefig('../charts/category.png') def group_by_time_of_day(df): grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', labels=['Day',", "theft', 'General Theft', 'Damage to Property', 'Robbery', 'Homicide'] p =", "'Thursday', 'Friday', 'Saturday', 'Sunday'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes", "grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Month') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Month')", "of The Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def group_by_month(df): grouped = df.groupby(['MONTH']).size().to_frame('Size')", "p.set_title('Crimes Percentage Grouped By Day of The Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png')", "Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def group_by_month(df): grouped = df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] =", "grouped = grouped.drop(columns='Size') p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By", "* grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') grouped.index = grouped.index.astype(int)", "100 * grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') p =", "Crimes') p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png') def group_by_year(df): grouped = df.groupby(['YEAR']).size().to_frame('Crimes') p", "p.get_legend().remove() plt.savefig('../charts/year.png') def group_by_territory(df): grouped = df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage'] = 100", "'Homicide'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By", "grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Time", "group_by_territory(df): grouped = df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] /", "autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Day of The Week') p.get_legend().remove()", "'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] p = grouped.plot.pie(y='Crimes', labels=labels,", "p.get_legend().remove() plt.savefig('../charts/category.png') def group_by_time_of_day(df): grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes',", "p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Month') p.set_ylabel('Percentage of", "'Sunday'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By", "'Night'], autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Time of Day') p.get_legend().remove()", "Grouped By Time of Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df): 
grouped", "'Robbery', 'Homicide'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped", "grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Category') p.get_legend().remove() plt.savefig('../charts/category.png')", "p.set_ylabel('Percentage of Crimes') p.set_xlabel('Territory Number') p.get_legend().remove() plt.savefig('../charts/territory.png') if __name__ ==", "= grouped.drop(columns='Size') p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Month')", "Crimes') p.set_xlabel('Territory Number') p.get_legend().remove() plt.savefig('../charts/territory.png') if __name__ == '__main__': df", "import matplotlib.pyplot as plt import pandas as pd def group_by_category(df):", "* grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') p = grouped.plot.bar()", "By Category') p.get_legend().remove() plt.savefig('../charts/category.png') def group_by_time_of_day(df): grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p", "grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%')", "Day of The Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def group_by_month(df): grouped =", "= ['Trespassing', 'Vehicle theft', 'General Theft', 'Damage to Property', 'Robbery',", "to Property', 'Robbery', 'Homicide'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes", "pandas as pd def group_by_category(df): grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes') labels =", "Number') p.get_legend().remove() plt.savefig('../charts/territory.png') if __name__ == '__main__': df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv')", "pd def group_by_category(df): grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes') labels = ['Trespassing', 'Vehicle", "'Friday', 'Saturday', 'Sunday'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage", "The Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def group_by_month(df): grouped = df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage']", "Territory') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Territory Number') p.get_legend().remove() plt.savefig('../charts/territory.png') if __name__", "'Saturday', 'Sunday'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped", "p.set_ylabel('Percentage of Crimes') p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png') def group_by_year(df): grouped =", "['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] p = grouped.plot.pie(y='Crimes',", "len(df) grouped = grouped.drop(columns='Size') grouped.index = grouped.index.astype(int) p = grouped.plot.bar()", "grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes') labels = ['Trespassing', 'Vehicle theft', 'General Theft',", "By Time of Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df): grouped =", "Year') p.get_legend().remove() plt.savefig('../charts/year.png') def group_by_territory(df): grouped = df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage'] =", "if __name__ == '__main__': df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv') group_by_territory(df) group_by_year(df) group_by_month(df)", 
"grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Territory') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Territory", "p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Day", "Property', 'Robbery', 'Homicide'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage", "'Evening', 'Night'], autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Time of Day')", "plt import pandas as pd def group_by_category(df): grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes')", "= grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Month') p.set_ylabel('Percentage of Crimes')", "def group_by_time_of_day(df): grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening',", "grouped['Percentage'] = 100 * grouped['Size'] / len(df) grouped = grouped.drop(columns='Size')", "Grouped By Territory') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Territory Number') p.get_legend().remove() plt.savefig('../charts/territory.png')", "'__main__': df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv') group_by_territory(df) group_by_year(df) group_by_month(df) group_by_time_of_day(df) group_by_day_of_the_week(df) group_by_category(df)", "of Crimes') p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png') def group_by_year(df): grouped = df.groupby(['YEAR']).size().to_frame('Crimes')", "plt.savefig('../charts/territory.png') if __name__ == '__main__': df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv') group_by_territory(df) group_by_year(df)", "labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Day of The Week')", "Percentage Grouped By Category') p.get_legend().remove() plt.savefig('../charts/category.png') def group_by_time_of_day(df): grouped =", "__name__ == '__main__': df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv') group_by_territory(df) group_by_year(df) group_by_month(df) group_by_time_of_day(df)", "labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] p", "grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',", "= grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Territory') p.set_ylabel('Percentage of Crimes')", "['Trespassing', 'Vehicle theft', 'General Theft', 'Damage to Property', 'Robbery', 'Homicide']", "p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Year') p.get_legend().remove()", "By Month') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png') def group_by_year(df):", "grouped.drop(columns='Size') grouped.index = grouped.index.astype(int) p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped", "= df.groupby(['YEAR']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By", "matplotlib.pyplot as plt import pandas as pd def group_by_category(df): grouped", "grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Day of The", "plt.savefig('../charts/day_of_the_week.png') def group_by_month(df): grouped = df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] = 100 *", "Grouped By Category') p.get_legend().remove() 
plt.savefig('../charts/category.png') def group_by_time_of_day(df): grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes')", "grouped = grouped.drop(columns='Size') grouped.index = grouped.index.astype(int) p = grouped.plot.bar() p.set_title('Crimes", "Percentage Grouped By Territory') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Territory Number') p.get_legend().remove()", "grouped.drop(columns='Size') p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Month') p.set_ylabel('Percentage", "as pd def group_by_category(df): grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes') labels = ['Trespassing',", "df.groupby(['CATEGORY']).size().to_frame('Crimes') labels = ['Trespassing', 'Vehicle theft', 'General Theft', 'Damage to", "def group_by_category(df): grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes') labels = ['Trespassing', 'Vehicle theft',", "= grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By", "of Crimes') p.set_xlabel('Territory Number') p.get_legend().remove() plt.savefig('../charts/territory.png') if __name__ == '__main__':", "p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Category')", "group_by_time_of_day(df): grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'],", "import pandas as pd def group_by_category(df): grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes') labels", "def group_by_day_of_the_week(df): grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels = ['Monday', 'Tuesday', 'Wednesday',", "= df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] / len(df) grouped", "p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Territory') p.set_ylabel('Percentage of", "Category') p.get_legend().remove() plt.savefig('../charts/category.png') def group_by_time_of_day(df): grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p =", "'Damage to Property', 'Robbery', 'Homicide'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')", "== '__main__': df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv') group_by_territory(df) group_by_year(df) group_by_month(df) group_by_time_of_day(df) group_by_day_of_the_week(df)", "'General Theft', 'Damage to Property', 'Robbery', 'Homicide'] p = grouped.plot.pie(y='Crimes',", "df.groupby(['YEAR']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Year')", "= grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Category') p.get_legend().remove()", "/ len(df) grouped = grouped.drop(columns='Size') p = grouped.plot.bar() p.set_title('Crimes Percentage", "= df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%') p.set_title('Crimes", "of Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df): grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels", "plt.savefig('../charts/month.png') def group_by_year(df): grouped = df.groupby(['YEAR']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%')", "def group_by_territory(df): grouped = df.groupby(['PDQ']).size().to_frame('Size') 
grouped['Percentage'] = 100 * grouped['Size']", "'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')", "By Day of The Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def group_by_month(df): grouped", "grouped.index.astype(int) p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Territory') p.set_ylabel('Percentage", "grouped.index = grouped.index.astype(int) p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By", "plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df): grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels = ['Monday', 'Tuesday',", "df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] / len(df) grouped =", "len(df) grouped = grouped.drop(columns='Size') p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped", "By Territory') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Territory Number') p.get_legend().remove() plt.savefig('../charts/territory.png') if", "= df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday',", "p.set_xlabel('Territory Number') p.get_legend().remove() plt.savefig('../charts/territory.png') if __name__ == '__main__': df =", "p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped", "Time of Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df): grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes')", "grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') p = grouped.plot.bar() p.set_title('Crimes", "df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%') p.set_title('Crimes Percentage", "autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Time of Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png')", "p.set_title('Crimes Percentage Grouped By Month') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Month') p.get_legend().remove()", "grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Year') p.get_legend().remove() plt.savefig('../charts/year.png') def", "df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']", "= grouped.index.astype(int) p = grouped.plot.bar() p.set_title('Crimes Percentage Grouped By Territory')", "def group_by_month(df): grouped = df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size']", "Percentage Grouped By Time of Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df):", "group_by_month(df): grouped = df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] /", "df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] / len(df) grouped =", "Theft', 'Damage to Property', 'Robbery', 'Homicide'] p = grouped.plot.pie(y='Crimes', labels=labels,", "Month') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png') def group_by_year(df): grouped", "= grouped.drop(columns='Size') grouped.index = grouped.index.astype(int) p = grouped.plot.bar() 
p.set_title('Crimes Percentage", "labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Time of", "= 100 * grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') p", "= df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] / len(df) grouped", "group_by_category(df): grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes') labels = ['Trespassing', 'Vehicle theft', 'General", "p.set_title('Crimes Percentage Grouped By Time of Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png') def", "'Vehicle theft', 'General Theft', 'Damage to Property', 'Robbery', 'Homicide'] p", "as plt import pandas as pd def group_by_category(df): grouped =", "Percentage Grouped By Day of The Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def", "p.get_legend().remove() plt.savefig('../charts/month.png') def group_by_year(df): grouped = df.groupby(['YEAR']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes',", "= grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Day of", "labels = ['Trespassing', 'Vehicle theft', 'General Theft', 'Damage to Property',", "grouped = df.groupby(['YEAR']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped", "labels=labels, autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Category') p.get_legend().remove() plt.savefig('../charts/category.png') def", "group_by_year(df): grouped = df.groupby(['YEAR']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes Percentage", "p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png') def group_by_year(df): grouped = df.groupby(['YEAR']).size().to_frame('Crimes') p =", "p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def group_by_month(df): grouped = df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] = 100", "Grouped By Day of The Week') p.get_legend().remove() plt.savefig('../charts/day_of_the_week.png') def group_by_month(df):", "= grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Year') p.get_legend().remove() plt.savefig('../charts/year.png')", "Grouped By Year') p.get_legend().remove() plt.savefig('../charts/year.png') def group_by_territory(df): grouped = df.groupby(['PDQ']).size().to_frame('Size')", "def group_by_year(df): grouped = df.groupby(['YEAR']).size().to_frame('Crimes') p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%') p.set_title('Crimes", "plt.savefig('../charts/year.png') def group_by_territory(df): grouped = df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage'] = 100 *", "Day') p.get_legend().remove() plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df): grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels =", "Grouped By Month') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png') def", "grouped = df.groupby(['MONTH']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] / len(df)", "= ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] p =", "autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Category') p.get_legend().remove() plt.savefig('../charts/category.png') def group_by_time_of_day(df):", "p.get_legend().remove() 
plt.savefig('../charts/time_of_day.png') def group_by_day_of_the_week(df): grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels = ['Monday',", "p.get_legend().remove() plt.savefig('../charts/territory.png') if __name__ == '__main__': df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv') group_by_territory(df)", "Percentage Grouped By Year') p.get_legend().remove() plt.savefig('../charts/year.png') def group_by_territory(df): grouped =", "p.set_title('Crimes Percentage Grouped By Territory') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Territory Number')", "grouped = df.groupby(['PDQ']).size().to_frame('Size') grouped['Percentage'] = 100 * grouped['Size'] / len(df)", "100 * grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') grouped.index =", "p.set_title('Crimes Percentage Grouped By Category') p.get_legend().remove() plt.savefig('../charts/category.png') def group_by_time_of_day(df): grouped", "grouped['Size'] / len(df) grouped = grouped.drop(columns='Size') grouped.index = grouped.index.astype(int) p", "autopct='%1.1f%%') p.set_title('Crimes Percentage Grouped By Year') p.get_legend().remove() plt.savefig('../charts/year.png') def group_by_territory(df):", "Percentage Grouped By Month') p.set_ylabel('Percentage of Crimes') p.set_xlabel('Month') p.get_legend().remove() plt.savefig('../charts/month.png')", "group_by_day_of_the_week(df): grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes') labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday'," ]
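The bar-chart functions above (group_by_month, group_by_territory) repeat the same steps: group by one column, convert the counts to percentages of all rows, drop the raw counts, plot a labelled bar chart, and save it. A small helper along the following lines could consolidate that pattern; the function name percentage_bar_chart and its signature are illustrative and not part of the original script.

import matplotlib.pyplot as plt
import pandas as pd


def percentage_bar_chart(df, column, title, xlabel, outfile):
    # Count rows per group and express each group as a percentage of all rows.
    grouped = df.groupby([column]).size().to_frame('Size')
    grouped['Percentage'] = 100 * grouped['Size'] / len(df)
    grouped = grouped.drop(columns='Size')
    ax = grouped.plot.bar()
    ax.set_title(title)
    ax.set_ylabel('Percentage of Crimes')
    ax.set_xlabel(xlabel)
    ax.get_legend().remove()
    plt.savefig(outfile)


# Example usage mirroring group_by_month above:
# percentage_bar_chart(df, 'MONTH', 'Crimes Percentage Grouped By Month', 'Month', '../charts/month.png')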
[ "2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1,", "3, 4, 5]], \"av\": [[0, 1, 4, 5], [0, 2,", "2, 4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 1,", "1, 2], [0, 1, 3], [0, 2, 3]]) # this", "msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self): from preferences", "p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1,", "2], [1, 2], [1], [3]]) committeesize = 3 for ilp", "[1, 2, 4, 5]], \"monroe-noilp\": [[0, 1, 2, 3], [0,", "\"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "[0], [1, 4, 5], [1], [2, 4, 5], [2], [3,", "5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "[1, 2, 3, 5], [1, 2, 4, 5], [1, 3,", "preflist preflist.reverse() for p in preflist: p.reverse() profile = Profile(6)", "2, 5], [2], [0, 1, 2, 3, 4], [0, 3,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"seqcc\": [[0,", "[1, 2, 3, 4], [1, 2, 3, 5], [1, 2,", "failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \"", "[0, 1, 3, 4], [0, 1, 3, 5], [0, 2,", "[1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3], [0, 2, 3],", "\"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]],", "with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences", "import DichotomousPreferences num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0,", "test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage = True", "= 2 for rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\":", "4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0,", "committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0,", "[0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1,", "3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2,", "\"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3)", "3], [1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2,", "2 for rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": #", "4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]),", "msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule( rule, profile, committeesize,", "msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule],", "5]], \"phragmen-enestroem\": [[0, 1, 4, 5], [0, 2, 4, 5],", "def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage = True committeesize", "result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule +", "profile profile = Profile(5) committeesize = 3 preflist = [[0,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-noilp\": [[0,", "5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12)", "[1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 
3, 2]), 5) self.assertEqual(monroescore_matching(profile,", "prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception):", "profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self):", "self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\") for rule", "1, 3], [0, 2, 3], [1, 2, 3]], \"greedy-monroe\": [[0,", "4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0, 1, 2,", "2, 4], [0, 1, 2, 5], [0, 1, 3, 4],", "2], [0, 1, 2], [0, 1], [3, 4], [3, 4],", "[0, 2, 3, 5], [0, 2, 4, 5]], \"phrag\": [[0,", "[[0, 1, 2]], \"sav\": [[0, 1, 3]], \"pav-ilp\": [[0, 1,", "[0, 2, 4, 5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\":", "5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]],", "profile profile = Profile(6) preflist = [[0, 1], [1], [1,", "= True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2],", "2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule,", "5], [0, 2, 4, 5]], \"phrag\": [[0, 1, 2, 4]],", "3, 4]], \"cc-noilp\": [[0, 1, 3], [0, 2, 3], [0,", "1, 2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0,", "4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4,", "4, 5], [2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4,", "3, 4], [1, 2, 3], [1, 3, 4]], \"seqcc\": [[0,", "2, 4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from", "2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights())", "rules_approval # all rules used? 
for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule", "3, 4, 5], [1, 2, 4, 5], [1, 3, 4,", "1, 4, 5], [0, 2, 4, 5], [0, 3, 4,", "in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \"", "[0, 1, 2, 5], [0, 2, 3, 4], [0, 2,", "2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1,", "2, 4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0, 1,", "[2, 3, 4, 5]], \"phrag\": [[0, 1, 4, 5], [0,", "# this test shows that tiebreaking is not (yet) #", "4 preflist = [[0, 1, 2], [1], [1, 2], [0]]", "tests1) # and now with reversed preflist preflist.reverse() for p", "[1, 3], [1, 3], [1, 4], [2, 4], [2, 5],", "preflist.reverse() for p in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist)", "Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2 for rule", "4, 5]], \"seqcc\": [[0, 1, 2, 3], [0, 1, 2,", "\"seqpav\": [[0, 1, 2, 4]], \"av\": [[0, 1, 2, 4],", "3, 5], [0, 2, 4, 5], [1, 2, 3, 4],", "[1, 2], [1], [3]]) committeesize = 3 for ilp in", "def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all rules", "[0, 2, 3, 5], [1, 2, 3, 4], [1, 2,", "4 profile = Profile(6) preflist = [[0, 4, 5], [0],", "3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2,", "[[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self):", "4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1, 4,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"cc-noilp\": [[0,", "test_createprofiles(self): from preferences import Profile from preferences import DichotomousPreferences num_cand", "2, 3], [1, 2, 3]], \"cc-ilp\": [[0, 1, 3], [0,", "import Profile self.longMessage = True # and another profile profile", "committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile from preferences", "(yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import", "= Profile(6) preflist = [[0, 4, 5], [0], [1, 4,", "# and now with reversed preflist preflist.reverse() for p in", "[0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile,", "[1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0, 2, 3],", "1, 2, 5], [0, 2, 3, 4], [0, 2, 3,", "4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0, 1, 2,", "True profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize =", "and now with reversed preflist preflist.reverse() for p in preflist:", "committeesize = 3 preflist = [[0, 1, 2], [0, 1,", "\"cc-noilp\": [[0, 1, 3], [0, 2, 3], [0, 3, 4],", "profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]]) committeesize", "[2, 3, 4, 5]], \"av\": [[0, 1, 4, 5], [0,", "from preferences import Profile from rules_approval import compute_rule self.longMessage =", "5], [0, 1, 3, 4], [0, 1, 3, 5], [0,", "5], [2, 3, 4, 5]], \"phrag\": [[0, 1, 4, 5],", "preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1)", "\"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0, 2,", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3,", "[0, 3, 4], [1, 2, 3], [1, 3, 4]], \"revseqcc\":", "\"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0, 1, 2, 4]],", "with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): 
prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None)", "= [[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for", "1, 2], [0, 1, 2], [0, 1, 2], [0, 1,", "4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights())", "self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception):", "[3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\":", "[1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\":", "profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1, 2, 4]], \"av\":", "[0, 1, 2, 5], [0, 1, 3, 4], [0, 1,", "[0, 1, 3, 5], [0, 2, 3, 4], [0, 2,", "run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile", "\" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)),", "tests3 = { \"seqpav\": [[0, 1, 2, 4]], \"av\": [[0,", "import rules_approval self.longMessage = True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0]))", "[[0, 1, 2, 4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\":", "rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in tests.keys(): output =", "2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1,", "[[0, 1, 2, 3]], \"seqcc\": [[0, 1, 2, 4], [0,", "5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__ ==", "with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize,", "5], [1], [2, 4, 5], [2], [3, 4, 5], [3]]", "unittestinstance.assertTrue(rule in tests.keys()) for rule in tests.keys(): output = rules_approval.compute_rule(rule,", "Profile from score_functions import monroescore_flowbased, monroescore_matching self.longMessage = True #", "self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5)", "Profile(6) preflist = [[0, 4, 5], [0], [1, 4, 5],", "reversed preflist preflist.reverse() for p in preflist: p.reverse() profile =", "2, 3, 5]], \"seqslav\": [[0, 1, 2, 4], [0, 1,", "[[0, 1, 4, 5], [0, 2, 4, 5], [0, 3,", "2, 3, 5], [1, 2, 4, 5]], \"seqcc\": [[0, 1,", "committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def", "4], [0, 2, 3, 5], [1, 2, 3, 4], [1,", "1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self):", "1, 2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0,", "4], [0, 2, 3, 5], [0, 2, 4, 5], [0,", "\"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "[0], [0], [1, 2], [1, 2], [1], [3]]) committeesize =", "2, 3, 5], [0, 2, 4, 5], [1, 2, 3,", "profile = Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3], [1,", "# Monroe and rule x only work with unit weights:", "Monroe score is 6 (even for committee [0, 1, 3])", "Profile from preferences import DichotomousPreferences num_cand = 7 prof =", "[0, 2, 4, 5], [1, 2, 3, 4], [1, 2,", "rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always 
returns", "[1, 4, 5], [1], [2, 4, 5], [2], [3, 4,", "profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile from", "preferences import Profile from preferences import DichotomousPreferences num_cand = 7", "[0, 3, 4], [1, 2, 3], [1, 3, 4]], \"monroe-ilp\":", "4, 5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0, 1,", "1, 3], [0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0,", "with unit weights: continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1]", "[0, 2, 3, 4], [0, 2, 3, 5], [0, 2,", "used? for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule", "msg=rule + \" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile,", "5], [1, 2, 4, 5]], \"monroe-noilp\": [[0, 1, 2, 3],", "2, 3]], \"cc-ilp\": [[0, 1, 3], [0, 2, 3], [0,", "tests.keys()) for rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize,", "2, 4, 5]], \"monroe-noilp\": [[0, 1, 2, 3], [0, 1,", "weights: continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result,", "4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2,", "5]], \"phrag\": [[0, 1, 4, 5], [0, 2, 4, 5],", "5]], \"av\": [[0, 1, 4, 5], [0, 2, 4, 5],", "1], [1], [1, 3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist)", "2, 3]], \"seqcc\": [[0, 1, 2, 4], [0, 1, 2,", "5], [0, 3, 4, 5], [1, 2, 3, 4], [1,", "profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2 for rule in", "1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1, 2, 4]],", "for p in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self,", "3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3,", "3, 4], [0, 1, 3, 5], [0, 2, 3, 4],", "3]], \"minimaxav-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2,", "resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile from preferences import", "[2], [0, 1, 2, 3, 4], [0, 3, 4], [0,", "4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1,", "import monroescore_flowbased, monroescore_matching self.longMessage = True # and a third", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\":", "2, 4]], \"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\": [[0, 1,", "[1, 2, 4, 5]], \"monroe-ilp\": [[0, 1, 2, 3], [0,", "[0, 1, 4, 5], [0, 2, 3, 4], [0, 2,", "5], [0, 2, 3, 4], [0, 2, 3, 5], [1,", "2, 3, 5], [1, 2, 4, 5]], \"monroe-noilp\": [[0, 1,", "# Unit tests import unittest def run_test_instance(unittestinstance, profile, committeesize, tests):", "Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for", "[[0, 4, 5], [0], [1, 4, 5], [1], [2, 4,", "self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3)", "4, 5], [2, 3, 4, 5]], \"revseqpav\": [[0, 1, 4,", "[0, 3, 4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\":", "\"revseqpav\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "\"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0,", "4, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "committeesize) 
self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self):", "DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1]))", "self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval", "3], [0, 2, 3]]) # this test shows that tiebreaking", "4]], \"seqcc\": [[0, 1, 3], [0, 2, 3], [0, 3,", "rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns one committee", "\"optphrag\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "2], [0, 2, 5], [2], [0, 1, 2, 3, 4],", "2, 4], [0, 1, 2, 5], [0, 2, 3, 4],", "3, 4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1,", "preferences import DichotomousPreferences num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences(", "num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])),", "= { \"seqpav\": [[0, 1, 2, 4]], \"av\": [[0, 1,", "prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5,", "preflist = [[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist)", "3 preflist = [[0, 1, 2], [0, 1, 2], [0,", "2]], \"sav\": [[0, 1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\":", "2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1,", "[2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)),", "3, 4], [1, 2, 3], [1, 3, 4]], \"monroe-ilp\": [[0,", "\"revseqcc\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "this test shows that tiebreaking is not (yet) # implemented", "rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule +", "from rules_approval import compute_rule self.longMessage = True profile = Profile(6)", "3, 4, 5]], \"phragmen-enestroem\": [[0, 1, 4, 5], [0, 2,", "self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0, 1,", "[[0, 1, 2], [0, 1, 3], [0, 2, 3]]) #", "profile = Profile(6) committeesize = 4 preflist = [[0, 3,", "1, 2, 4]], \"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\": [[0,", "tests3) def test_monroescore(self): from preferences import Profile from score_functions import", "score is 6 (even for committee [0, 1, 3]) self.assertEqual(", "6, msg=rule + \" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule,", "\"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0, 1, 2, 3],", "self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile import", "unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all", "5], [1, 2, 3, 4], [1, 2, 3, 5]], \"rule-x\":", "5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0,", "\"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4], [2,", "self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) 
self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4,", "committeesize = 4 preflist = [[0, 3, 4, 5], [1,", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-noilp\":", "profile, committeesize)), 6, msg=rule + \" failed\") for rule in", "def test_createprofiles(self): from preferences import Profile from preferences import DichotomousPreferences", "profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\"", "[[0, 1, 3]], \"optphrag\": [[0, 1, 3], [0, 2, 3],", "2, 5], [0, 2, 3, 4], [0, 2, 3, 5],", "4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2,", "6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval profile", "Profile self.longMessage = True # and a third profile profile", "committeesize, tests3) def test_monroescore(self): from preferences import Profile from score_functions", "TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from preferences import", "opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import", "= [[0, 4, 5], [0], [1, 4, 5], [1], [2,", "\"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0, 1, 2, 3]],", "3]], \"optphrag\": [[0, 1, 3], [0, 2, 3], [1, 2,", "rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in tests.keys():", "[1, 3, 4, 5], [2, 3, 4, 5]], \"pav-ilp\": [[0,", "Profile(5) committeesize = 4 preflist = [[0, 1, 2], [1],", "len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import", "3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3,", "profile = Profile(6) preflist = [[0, 1], [1], [1, 3],", "[0, 1, 3, 4], [0, 1, 3, 5], [0, 1,", "[1, 2, 3], [1, 3, 4]], \"revseqcc\": [[0, 1, 3],", "5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception):", "2, 5], [0, 1, 3, 4], [0, 1, 3, 5],", "self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \" failed with", "for ilp in [True, False]: # max Monroe score is", "2, 4, 5], [0, 3, 4, 5], [1, 2, 4,", "\"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\": [[0, 1, 2, 4]],", "(even for committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp,", "3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2,", "5]], \"seqslav\": [[0, 1, 2, 4], [0, 1, 2, 5],", "[2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1, 4, 5], [0,", "3, 4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0,", "4], [0, 2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\":", "profile, committeesize) self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def", "4, 5], [1, 2, 3, 4], [1, 2, 3, 5],", "def test_monroescore(self): from preferences import Profile from score_functions import monroescore_flowbased,", "2, 3, 5], [1, 2, 4, 5]], \"monroe-ilp\": [[0, 1,", "max Monroe score is 6 (even for committee [0, 1,", "= DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]),", "[1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\":", "\"optphrag\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "that tiebreaking is not (yet) # implemented for opt-Phragmen def", "\"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0,", 
"DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4))", "[[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile,", "only work with unit weights: continue result = rules_approval.compute_rule(rule, profile,", "5], [0, 1, 4, 5], [0, 2, 3, 4], [0,", "[0, 2, 3, 5], [0, 2, 4, 5]], \"revseqcc\": [[0,", "3], [1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3], [0, 2,", "2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0, 1,", "7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with", "4], [1, 2, 3], [1, 3, 4]], \"revseqcc\": [[0, 1,", "2, 3], [1, 2, 3]], \"monroe-noilp\": [[0, 1, 3], [0,", "[[0, 3, 4, 5], [1, 2], [0, 2, 5], [2],", "5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]),", "committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from", "def test_mwrules_weightsconsidered(self): from preferences import Profile from preferences import DichotomousPreferences", "2, 4], [0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0,", "import Profile import rules_approval self.longMessage = True profile = Profile(4)", "\"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "[[0, 1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1,", "\"monroe-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "committeesize = 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in", "run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all rules used?", "4, 5]], \"revseqpav\": [[0, 1, 4, 5], [0, 2, 4,", "= True profile = Profile(6) profile.add_preferences([[0], [0], [1, 3], [1,", "4, 5]], \"sav\": [[0, 1, 2, 3], [0, 1, 2,", "1, 4, 5], [0, 2, 3, 4], [0, 2, 3,", "[[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0, 1,", "4], [0, 2, 3, 5], [0, 2, 4, 5]], \"phrag\":", "profile, committeesize, tests): import rules_approval # all rules used? 
for", "[0, 3, 4, 5], [1, 2, 4, 5], [1, 3,", "5], [0, 2, 4, 5], [1, 2, 3, 4], [1,", "2, 3], [0, 3, 4], [1, 2, 3], [1, 3,", "5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0, 1, 2,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\":", "\"minimaxav-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage = True", "in rule: # Monroe and rule x only work with", "profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1, 3]], \"av\": [[0,", "unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule(", "profile = Profile(5) committeesize = 4 preflist = [[0, 1,", "in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule,", "[3, 4, 5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"revseqpav\": [[0,", "4]], \"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\": [[0, 1, 2,", "1, 2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1,", "from preferences import Profile from score_functions import monroescore_flowbased, monroescore_matching self.longMessage", "3]], \"av\": [[0, 1, 2]], \"sav\": [[0, 1, 3]], \"pav-ilp\":", "[0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1, 2,", "in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output,", "3]], \"seqcc\": [[0, 1, 2, 4], [0, 1, 2, 5],", "committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage", "3, 4, 5]], \"sav\": [[0, 1, 2, 3], [0, 1,", "test_monroescore(self): from preferences import Profile from score_functions import monroescore_flowbased, monroescore_matching", "[[0, 1], [1], [1, 3], [4], [2], [1, 5, 3]]", "[[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"optphrag\": [[0,", "2, 3]], \"phrag\": [[0, 1, 3]], \"optphrag\": [[0, 1, 3],", "work with unit weights: continue result = rules_approval.compute_rule(rule, profile, committeesize)", "4, 5], [2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1 =", "5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2, 3],", "3], [1, 2, 3]], \"cc-ilp\": [[0, 1, 3], [0, 2,", "Profile(6) preflist = [[0, 1], [1], [1, 3], [4], [2],", "[1, 3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1,", "2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1,", "2, 3, 4], [0, 2, 3, 5], [1, 2, 3,", "import compute_rule self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0],", "5]], \"sav\": [[0, 1, 2, 3], [0, 1, 2, 4],", "tests1 = { \"seqpav\": [[0, 1, 4, 5], [0, 2,", "4, 5]], \"cc-ilp\": [[0, 1, 2, 3], [0, 1, 2,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0,", "Profile import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0],", "== \"greedy-monroe\": # always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile,", "{ \"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\":", "self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]])", "[0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\":", "3], [0, 1, 2, 4], [0, 2, 3, 4], [0,", "5]], \"revseqcc\": [[0, 1, 2, 3]], 
\"monroe-ilp\": [[0, 1, 2,", "[1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2, 3], [0,", "one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \"", "= { \"seqpav\": [[0, 1, 4, 5], [0, 2, 4,", "4, 5], [0, 2, 3, 4], [0, 2, 3, 5],", "+ \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import", "2, 3, 4], [1, 2, 3, 5], [1, 2, 4,", "4], [2, 5], [2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\",", "rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [0], [0],", "[0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 =", "3, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule]", "test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval profile = Profile(5)", "[0, 1, 2, 3, 4], [0, 3, 4], [0, 2,", "2, 4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\": [[0, 1,", "import rules_approval # all rules used? for rule in rules_approval.MWRULES:", "Monroe and rule x only work with unit weights: continue", "[0, 3, 4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3", "5], [0, 2, 4, 5], [0, 3, 4, 5], [1,", "DichotomousPreferences num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4,", "4], [2, 4], [2, 5], [2, 5]]) committeesize = 3", "1, 3, 5], [0, 2, 3, 4], [0, 2, 3,", "import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [0],", "3, 4], [0, 1, 3, 5], [0, 1, 4, 5],", "2, 4, 5]], \"seqcc\": [[0, 1, 2, 3], [0, 1,", "msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self):", "preferences import Profile from score_functions import monroescore_flowbased, monroescore_matching self.longMessage =", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\":", "[1], [1, 2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with", "rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile,", "3, 4, 5]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1,", "in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in tests.keys(): output", "rules used? 
for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for", "[2, 4], [2, 5], [2, 5]]) committeesize = 3 self.assertEqual(", "1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1, 2,", "1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2,", "[[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\":", "tests import unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval", "self.longMessage = True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5))", "\"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0,", "3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2,", "4], [1, 2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1,", "tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule],", "3, 4, 5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1,", "[0, 3, 4], [1, 2, 3], [1, 3, 4]], \"seqcc\":", "2, 3], [0, 1, 2, 4], [0, 2, 3, 4],", "3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1,", "[0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0, 1, 3],", "4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0, 1, 2,", "4, 5], [2, 3, 4, 5]], \"optphrag\": [[0, 1, 2,", "1, 3]], \"optphrag\": [[0, 1, 3], [0, 2, 3], [1,", "[1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0,", "3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2,", "1, 3], [0, 2, 3], [0, 3, 4], [1, 2,", "1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1, 3]],", "from preferences import Profile from preferences import DichotomousPreferences num_cand =", "and a third profile profile = Profile(6) preflist = [[0,", "1, 2], [0, 1, 2], [0, 1], [3, 4], [3,", "test shows that tiebreaking is not (yet) # implemented for", "\" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences", "[2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile,", "4, 5], [2, 3, 4, 5]], } run_test_instance(self, profile, committeesize,", "prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2", "and another profile profile = Profile(5) committeesize = 3 preflist", "self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"])", "4 preflist = [[0, 3, 4, 5], [1, 2], [0,", "1, 2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0,", "4], [1, 2, 3, 5], [1, 2, 4, 5], [1,", "4], [0, 2, 3, 4]], \"sav\": [[0, 1, 2, 4]],", "4]], \"slav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1,", "committeesize, resolute=True)), 1, msg=rule + \" failed with resolute=True\") def", "for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in", "[[0, 1, 2, 4]], \"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\":", "committeesize = 3 for ilp in [True, False]: # max", "from preferences import Profile import rules_approval profile = Profile(5) committeesize", "= 3 for ilp in [True, False]: # max Monroe", "4, 5]], \"monroe-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "2, 3]], \"monroe-noilp\": [[0, 1, 3], [0, 2, 3], [1,", "monroescore_matching self.longMessage = True # and a third profile profile", 
"self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self):", "1, 2, 4], [0, 2, 3, 4], [0, 2, 3,", "a third profile profile = Profile(6) committeesize = 4 preflist", "4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize,", "3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2,", "self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4)", "1, 2, 4], [0, 1, 2, 5], [0, 1, 3,", "rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in", "3, 4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1,", "committeesize = 4 preflist = [[0, 1, 2], [1], [1,", "1, 2], [0, 1, 2], [0, 1, 2], [0, 1],", "\"phrag\": [[0, 1, 3]], \"optphrag\": [[0, 1, 3], [0, 2,", "5], [1, 2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\":", "[1, 2, 3, 5]], \"rule-x\": [[0, 1, 4, 5], [0,", "Profile self.longMessage = True committeesize = 4 profile = Profile(6)", "# implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile", "2, 3, 4], [0, 2, 3, 5], [0, 2, 4,", "2, 3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0,", "5], [1, 2], [0, 2, 5], [2], [0, 1, 2,", "[2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0,", "4], [0, 1, 3, 5], [0, 2, 3, 4], [0,", "3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2,", "4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1, 3]],", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"cc-ilp\":", "2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "2, 3, 5], [0, 2, 4, 5], [0, 3, 4,", "1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2,", "= Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand]))", "third profile profile = Profile(6) preflist = [[0, 1], [1],", "rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \" failed", "with resolute=True\") def test_monroe_indivisible(self): from preferences import Profile import rules_approval", "[1], [3]]) committeesize = 3 for ilp in [True, False]:", "committeesize, tests1) # and now with reversed preflist preflist.reverse() for", "returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule +", "+ \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from", "output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1,", "3, 5], [1, 2, 4, 5], [1, 3, 4, 5],", "1, 3, 4], [0, 1, 3, 5], [0, 1, 4,", "= [[0, 1], [1], [1, 3], [4], [2], [1, 5,", "resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\")", "3], [1, 2, 3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0,", "[0], [1, 3], [1, 3], [1, 4], [2, 4], [2,", "test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import compute_rule self.longMessage", "[3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0, 1, 4, 5],", "Profile(6) profile.add_preferences(preflist) 
run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences", "self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from", "from preferences import Profile from preferences import DichotomousPreferences import rules_approval", "profile, committeesize, resolute=True)), 1, msg=rule + \" failed with resolute=True\")", "3, 4, 5], [2, 3, 4, 5]], \"av\": [[0, 1,", "5], [2, 3, 4, 5]], \"av\": [[0, 1, 4, 5],", "\"pav-ilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "\"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2, 3]],", "preflist = [[0, 3, 4, 5], [1, 2], [0, 2,", "3, 4, 5], [1, 2], [0, 2, 5], [2], [0,", "5], [2, 3, 4, 5]], } run_test_instance(self, profile, committeesize, tests1)", "3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0, 1,", "4], [0, 2, 3, 5], [0, 2, 4, 5]], \"revseqcc\":", "# and a third profile profile = Profile(6) committeesize =", "3, 5]], \"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\": [[0, 1,", "2, 3, 5], [1, 2, 3, 4], [1, 2, 3,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0,", "[3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1, 3]], \"av\":", "1, 2, 4]], \"av\": [[0, 1, 2, 4], [0, 2,", "[2], [3]]) committeesize = 2 for rule in rules_approval.MWRULES.keys(): if", "3, 4, 5], [2, 3, 4, 5]], \"phrag\": [[0, 1,", "def test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval profile =", "2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2,", "in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns one", "[0, 2, 3], [1, 2, 3]], \"monroe-noilp\": [[0, 1, 3],", "\"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\": [[0, 1, 2, 4],", "rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1], [2],", "1, 2, 5], [0, 1, 3, 4], [0, 1, 3,", "[[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1,", "[0, 1, 2, 4], [0, 2, 3, 4], [0, 2,", "2, 4]], \"optphrag\": [[0, 1, 2, 3], [0, 1, 2,", "2, 3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0, 1,", "2, 4, 5], [1, 2, 3, 4], [1, 2, 3,", "5]], \"monroe-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1,", "in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize,", "= [[0, 1, 2], [0, 1, 2], [0, 1, 2],", "[2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5], [0,", "[2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0,", "3]], \"minimaxav-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2,", "} run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import", "[0, 1, 3, 5], [0, 1, 4, 5], [0, 2,", "\"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]],", "[1], [2, 4, 5], [2], [3, 4, 5], [3]] profile.add_preferences(preflist)", "3, 4, 5], [2, 3, 4, 5]], \"sav\": [[0, 1,", "3, 4, 5]], } run_test_instance(self, profile, committeesize, tests1) # and", "with reversed preflist preflist.reverse() for p in preflist: p.reverse() profile", "= Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from", "1, 3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0, 1, 3],", "in tests.keys()) for rule in tests.keys(): output = rules_approval.compute_rule(rule, profile,", "# all rules used? 
for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in", "failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import", "3]], \"monroe-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2,", "[0, 2, 3, 5], [0, 2, 4, 5], [1, 2,", "3, 5], [1, 2, 4, 5]], \"monroe-ilp\": [[0, 1, 2,", "from preferences import Profile self.longMessage = True committeesize = 4", "2, 3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1,", "= 4 profile = Profile(6) preflist = [[0, 4, 5],", "1, 3], [0, 2, 3]]) # this test shows that", "[1, 2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0,", "5]], \"rule-x\": [[0, 1, 4, 5], [0, 2, 4, 5],", "4, 5]], \"pav-ilp\": [[0, 1, 4, 5], [0, 2, 4,", "2, 3], [1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3], [0,", "\"seqcc\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences", "5]) p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2])", "profile, committeesize, tests1) # and now with reversed preflist preflist.reverse()", "profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage", "2, 4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0, 1,", "tiebreaking is not (yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self):", "\"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2) def", "5], [0], [1, 4, 5], [1], [2, 4, 5], [2],", "[1, 2, 3]], \"monroe-noilp\": [[0, 1, 3], [0, 2, 3],", "[[0, 1, 2, 3], [0, 1, 2, 4], [0, 2,", "with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0,", "\"seqcc\": [[0, 1, 2, 4], [0, 1, 2, 5], [0,", "True committeesize = 4 profile = Profile(6) preflist = [[0,", "committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with", "3, 5], [0, 1, 4, 5], [0, 2, 3, 4],", "Profile import rules_approval profile = Profile(5) committeesize = 4 preflist", "2, 3, 5], [1, 2, 4, 5]], \"cc-noilp\": [[0, 1,", "\"monroe\" in rule or \"rule-x\" in rule: # Monroe and", "committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage =", "committeesize)), 6, msg=rule + \" failed\") for rule in rules_approval.MWRULES.keys():", "2, 3, 5], [1, 2, 4, 5], [1, 3, 4,", "3, 5], [0, 2, 3, 4], [0, 2, 3, 5],", "5], [2, 3, 4, 5]], \"revseqpav\": [[0, 1, 4, 5],", "profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self):", "[1, 2, 3]], \"cc-ilp\": [[0, 1, 3], [0, 2, 3],", "[1, 4], [2, 4], [2, 5], [2, 5]]) committeesize =", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phrag\":", "[0, 1, 2, 4], [0, 1, 2, 5], [0, 2,", "\"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"sav\": [[0,", "profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4], [2, 4],", "\"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self,", "[4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]),", "3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2,", "[1, 2, 3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1,", "\"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]],", "for 
opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval", "[2, 4, 5], [2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1", "rules_approval profile = Profile(5) committeesize = 4 preflist = [[0,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0,", "[2, 3, 4, 5]], } run_test_instance(self, profile, committeesize, tests1) #", "\"monroe-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]])", "rule: # Monroe and rule x only work with unit", "4]], \"revseqcc\": [[0, 1, 3], [0, 2, 3], [0, 3,", "[0, 2, 5], [2], [0, 1, 2, 3, 4], [0,", "= Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2 for", "4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences", "3, 5]], \"seqslav\": [[0, 1, 2, 4], [0, 1, 2,", "4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1", "\"seqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4],", "5], [1, 2, 4, 5]], \"monroe-ilp\": [[0, 1, 2, 3],", "3], [0, 2, 3], [1, 2, 3]], \"minimaxav-ilp\": [[0, 1,", "and rule x only work with unit weights: continue result", "3, 4]], \"revseqcc\": [[0, 1, 3], [0, 2, 3], [0,", "4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0, 1, 2,", "[0, 1, 2], [0, 1], [3, 4], [3, 4], [3]]", "2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1,", "rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile", "1, 3], [0, 2, 3], [1, 2, 3]], \"phrag\": [[0,", "always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule", "[[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1,", "3], [0, 1, 2, 4], [0, 1, 2, 5], [0,", "[0, 1, 2], [0, 1, 2], [0, 1, 2], [0,", "\" failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] +", "preferences import Profile self.longMessage = True # and another profile", "rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual(", "None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def", "4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with", "[[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\":", "\"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0, 1, 2, 4]],", "4, 5]], \"monroe-ilp\": [[0, 1, 2, 3], [0, 1, 2,", "5]], \"revseqpav\": [[0, 1, 4, 5], [0, 2, 4, 5],", "committeesize, tests): import rules_approval # all rules used? 
for rule", "\"monroe-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "resolute=True\") def test_monroe_indivisible(self): from preferences import Profile import rules_approval self.longMessage", "2, 3]], \"minimaxav-ilp\": [[0, 1, 3], [0, 2, 3], [1,", "in rule or \"rule-x\" in rule: # Monroe and rule", "3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0, 2,", "import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1],", "committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage =", "[[0, 1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\":", "test_monroe_indivisible(self): from preferences import Profile import rules_approval self.longMessage = True", "run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences import Profile", "[2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0,", "p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4)", "2, 3, 5]], \"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\": [[0,", "prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception):", "= True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0]))", "4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2,", "for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule", "Profile from rules_approval import compute_rule self.longMessage = True profile =", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"pav-noilp\":", "\"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0, 1, 2, 4]],", "3, 4, 5], [2, 3, 4, 5]], \"pav-noilp\": [[0, 1,", "[[0, 1, 2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\":", "[[0, 1, 2], [0, 1, 2], [0, 1, 2], [0,", "unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue(", "\"phragmen-enestroem\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "5]], \"seqcc\": [[0, 1, 2, 3], [0, 1, 2, 4],", "4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3],", "2, 4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0,", "\"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]],", "self.longMessage = True # and another profile profile = Profile(5)", "4, 5], [0, 3, 4, 5], [1, 2, 4, 5],", "2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1,", "4], [1, 2, 3, 5]], \"rule-x\": [[0, 1, 4, 5],", "4, 5]], \"phrag\": [[0, 1, 4, 5], [0, 2, 4,", "\"seqpav\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0, 1, 3]],", "5], [2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5],", "[1, 2, 3, 4], [1, 2, 3, 5]], \"rule-x\": [[0,", "preferences import Profile from preferences import DichotomousPreferences import rules_approval self.longMessage", "2, 3], [1, 2, 3]], \"phrag\": [[0, 1, 3]], \"optphrag\":", "1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0, 2, 3], [1,", "profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0, 1, 4, 5], [0,", "rules_approval import compute_rule self.longMessage = True profile = Profile(6) profile.add_preferences([[0],", "4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\": [[0, 1, 2,", "5], [1, 3, 
4, 5], [2, 3, 4, 5]], \"av\":", "5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3],", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"minimaxav-ilp\":", "[1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3],", "4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences", "5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1,", "[[0, 1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0, 2, 3],", "1, msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self): from", "self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences", "continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule", "profile = Profile(6) preflist = [[0, 4, 5], [0], [1,", "3, 4, 5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0,", "with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed", "ilp in [True, False]: # max Monroe score is 6", "resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output =", "[1], [1, 3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile,", "[[0, 1, 2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0,", "4, 5], [2, 3, 4, 5]], \"sav\": [[0, 1, 2,", "Unit tests import unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import", "5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1, 4, 5],", "p in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile,", "2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0,", "score_functions import monroescore_flowbased, monroescore_matching self.longMessage = True # and a", "if \"monroe\" in rule or \"rule-x\" in rule: # Monroe", "[2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile,", "def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage = True #", "profile profile = Profile(6) committeesize = 4 preflist = [[0,", "= 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self):", "2, 3, 4], [1, 2, 3, 5]], \"rule-x\": [[0, 1,", "3, 5], [1, 2, 4, 5]], \"monroe-noilp\": [[0, 1, 2,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"av\": [[0,", "[1, 2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception):", "rule or \"rule-x\" in rule: # Monroe and rule x", "now with reversed preflist preflist.reverse() for p in preflist: p.reverse()", "5], [2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1 = {", "from preferences import Profile import rules_approval self.longMessage = True profile", "tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True", "1, 2, 4], [0, 1, 2, 5], [0, 2, 3,", "[0, 2, 3], [1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3],", "= 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule", "3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\":", "4, 5], [0, 3, 4, 5], [1, 2, 3, 4],", "[2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5)", 
"[1, 2, 3, 5]], \"seqslav\": [[0, 1, 2, 4], [0,", "from preferences import DichotomousPreferences import rules_approval self.longMessage = True profile", "[0, 2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0,", "with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0,", "[[0, 1, 3], [0, 2, 3], [0, 3, 4], [1,", "is 6 (even for committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"optphrag\":", "[1, 3, 4, 5], [2, 3, 4, 5]], \"phrag\": [[0,", "import unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval #", "4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]),", "5], [2], [0, 1, 2, 3, 4], [0, 3, 4],", "\"revseqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4],", "4], [1, 2, 3, 5]], \"seqslav\": [[0, 1, 2, 4],", "2, 3, 5]], \"rule-x\": [[0, 1, 4, 5], [0, 2,", "rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \"", "4, 5], [0], [1, 4, 5], [1], [2, 4, 5],", "3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], }", "[1, 2, 4, 5]], \"seqcc\": [[0, 1, 2, 3], [0,", "for committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False),", "{ \"seqpav\": [[0, 1, 2, 4]], \"av\": [[0, 1, 2,", "[1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3],", "Profile from preferences import DichotomousPreferences import rules_approval self.longMessage = True", "3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],", "4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0, 1,", "4, 5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1,", "[0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0,", "from score_functions import monroescore_flowbased, monroescore_matching self.longMessage = True # and", "committeesize = 2 for rule in rules_approval.MWRULES.keys(): if rule ==", "import Profile from preferences import DichotomousPreferences num_cand = 7 prof", "\"seqslav\": [[0, 1, 2, 4], [0, 1, 2, 5], [0,", "resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from", "Profile self.longMessage = True # and another profile profile =", "\"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2, 4]],", "5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1, 2, 3],", "3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences", "tests): import rules_approval # all rules used? for rule in", "tests2 = { \"seqpav\": [[0, 1, 3]], \"av\": [[0, 1,", "5], [1, 2, 3, 4], [1, 2, 3, 5], [1,", "failed with resolute=True\") def test_monroe_indivisible(self): from preferences import Profile import", "[[0, 1, 2, 4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\":", "in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in rule:", "a third profile profile = Profile(6) preflist = [[0, 1],", "4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0,", "5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]),", "all rules used? 
for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys())", "# always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6,", "[[0, 1, 2, 4], [0, 1, 2, 5], [0, 1,", "3, 4, 5], [2, 3, 4, 5]], } run_test_instance(self, profile,", "for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with", "rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def", "failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage", "failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output),", "5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 =", "[1, 2], [1, 2], [1], [3]]) committeesize = 3 for", "shows that tiebreaking is not (yet) # implemented for opt-Phragmen", "profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in", "5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0, 1, 4,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"sav\":", "4], [1, 2, 3], [1, 3, 4]], \"seqcc\": [[0, 1,", "tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def", "[[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\":", "self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import", "[1, 3, 4, 5], [2, 3, 4, 5]], \"pav-noilp\": [[0,", "resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage =", "self.longMessage = True committeesize = 4 profile = Profile(6) preflist", "2, 4, 5], [0, 3, 4, 5], [1, 2, 3,", "[0, 2, 3]]) # this test shows that tiebreaking is", "is not (yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from", "[3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1,", "profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize =", "import Profile from preferences import DichotomousPreferences import rules_approval self.longMessage =", "False]: # max Monroe score is 6 (even for committee", "1, 3, 5], [0, 1, 4, 5], [0, 2, 3,", "3]], \"cc-ilp\": [[0, 1, 3], [0, 2, 3], [0, 3,", "3], [1, 3, 4]], \"revseqcc\": [[0, 1, 3], [0, 2,", "1, 2, 3], [0, 1, 2, 4], [0, 2, 3,", "2, 4, 5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0,", "profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize)", "1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]],", "= 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None)", "[1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile,", "2, 3]]) # this test shows that tiebreaking is 
not", "4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1,", "[1, 2, 3], [1, 3, 4]], \"seqcc\": [[0, 1, 3],", "\"av\": [[0, 1, 2, 4], [0, 2, 3, 4]], \"sav\":", "3], [1, 3, 4]], \"seqcc\": [[0, 1, 3], [0, 2,", "2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1,", "resolute=False), [[0, 1, 2], [0, 1, 3], [0, 2, 3]])", "2], [0, 1, 3], [0, 2, 3]]) # this test", "5], [2, 3, 4, 5]], \"sav\": [[0, 1, 2, 3],", "2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1,", "[1, 3, 4]], \"seqcc\": [[0, 1, 3], [0, 2, 3],", "4], [0, 3, 4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist)", "\"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\": [[0, 1, 2, 4]],", "[2, 3, 4, 5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\":", "\" failed with resolute=True\") def test_monroe_indivisible(self): from preferences import Profile", "[3]]) committeesize = 3 for ilp in [True, False]: #", "= Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1],", "3]]) # this test shows that tiebreaking is not (yet)", "3, 4], [1, 2, 3, 5], [1, 2, 4, 5],", "class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from preferences", "1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self,", "5], [1, 2, 4, 5]], \"seqcc\": [[0, 1, 2, 3],", "2, 3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 3]],", "3, 4], [1, 2, 3, 5]], \"seqslav\": [[0, 1, 2,", "p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def", "= Profile(6) committeesize = 4 preflist = [[0, 3, 4,", "rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception):", "rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule + \" failed\"+str(result))", "if rule == \"greedy-monroe\": # always returns one committee continue", "2, 4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1,", "Profile(5) committeesize = 3 preflist = [[0, 1, 2], [0,", "= DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5],", "= rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule + \"", "self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4,", "committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0, 1, 3], [0,", "preferences import Profile self.longMessage = True # and a third", "1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0, 1, 3]],", "5]], } run_test_instance(self, profile, committeesize, tests1) # and now with", "5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5],", "run_test_instance(self, profile, committeesize, tests1) # and now with reversed preflist", "3, 5], [1, 2, 4, 5]], \"cc-noilp\": [[0, 1, 2,", "test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage = True committeesize =", "\"sav\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys(): if", "failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1,", "[1, 2, 3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0,", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],", "3, 
4], [1, 2, 3, 5]], \"rule-x\": [[0, 1, 4,", "3, 4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1,", "profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage", "for rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always", "\"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2, 3],", "+ \" failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True)", "None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\",", "1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\":", "5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4,", "self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5])", "preferences import DichotomousPreferences import rules_approval self.longMessage = True profile =", "implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from", "True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"revseqpav\":", "[[0, 2, 3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0,", "\"slav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "5]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "2, 3, 4], [1, 2, 3, 5]], \"seqslav\": [[0, 1,", "5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "[1, 2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1, 3],", "4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__", "# max Monroe score is 6 (even for committee [0,", "profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile", "3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\": [[0, 1, 2,", "1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2],", "profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]),", "= Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4],", "Profile(6) committeesize = 4 preflist = [[0, 3, 4, 5],", "1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize,", "committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\")", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"greedy-monroe\":", "resolute=True)), 1, msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self):", "[1, 2, 4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0,", "5]], \"revseqcc\": [[0, 1, 2, 3], [0, 1, 2, 4],", "2, 4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\": [[0, 1,", "compute_rule self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0], [1,", "3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\":", "prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5])", "or \"rule-x\" in rule: # Monroe and rule x only", "preferences import Profile import rules_approval profile = Profile(5) committeesize =", "\" 
failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval", "[2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-noilp\":", "1, 2, 4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0,", "self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4)", "rules_approval self.longMessage = True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1],", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4,", "4], [0, 1, 2, 5], [0, 2, 3, 4], [0,", "4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "1, 2, 3, 4], [0, 3, 4], [0, 2, 4],", "3, 4, 5], [1, 2, 3, 4], [1, 2, 3,", "\"phrag\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0, 1, 2,", "\"greedy-monroe\": # always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)),", "3]], \"phrag\": [[0, 1, 3]], \"optphrag\": [[0, 1, 3], [0,", "5], [2, 3, 4, 5]], \"optphrag\": [[0, 1, 2, 3]],", "= True committeesize = 4 profile = Profile(6) preflist =", "3, 4, 5]], \"phrag\": [[0, 1, 4, 5], [0, 2,", "2, 4, 5]], \"monroe-ilp\": [[0, 1, 2, 3], [0, 1,", "2, 3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\": [[0, 1,", "import Profile from rules_approval import compute_rule self.longMessage = True profile", "= [[0, 3, 4, 5], [1, 2], [0, 2, 5],", "1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]],", "3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0,", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\":", "2, 4, 5]], \"cc-ilp\": [[0, 1, 2, 3], [0, 1,", "with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4,", "4, 5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4,", "4], [0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1,", "2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile,", "[[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3) def", "3, 4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3 =", "monroescore_flowbased, monroescore_matching self.longMessage = True # and a third profile", "self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3)", "self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(),", "tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule( rule, profile,", "import Profile import rules_approval profile = Profile(5) committeesize = 4", "def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage =", "3, 4], [1, 2, 3], [1, 3, 4]], \"revseqcc\": [[0,", "1, 2, 4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\": [[0,", "\"rule-x\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "1, 2, 4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"pav-ilp\":", "output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class", "rule x only work with unit weights: continue result =", "3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "\"av\": [[0, 
1, 4, 5], [0, 2, 4, 5], [0,", "2, 3, 5], [0, 2, 4, 5]], \"phrag\": [[0, 1,", "4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2, 3]],", "3], [0, 2, 3], [1, 2, 3]], \"greedy-monroe\": [[0, 1,", "5], [0, 3, 4, 5], [1, 2, 4, 5], [1,", "[0, 2, 4, 5], [0, 3, 4, 5], [1, 2,", "3, 4]], \"seqcc\": [[0, 1, 3], [0, 2, 3], [0,", "2], [0, 1, 2], [0, 1, 2], [0, 1, 2],", "4, 5]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True #", "third profile profile = Profile(6) committeesize = 4 preflist =", "3, 4, 5], [2, 3, 4, 5]], \"optphrag\": [[0, 1,", "True # and a third profile profile = Profile(6) preflist", "3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2,", "[[0, 1, 3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0, 1,", "committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output", "3 for ilp in [True, False]: # max Monroe score", "[0, 2, 3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\":", "4], [0, 1, 2, 5], [0, 1, 3, 4], [0,", "[0], [1, 2], [1, 2], [1], [3]]) committeesize = 3", "preferences import Profile import rules_approval self.longMessage = True profile =", "[True, False]: # max Monroe score is 6 (even for", "\"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0,", "5], [1, 2, 3, 4], [1, 2, 3, 5]], \"seqslav\":", "from preferences import Profile self.longMessage = True # and a", "3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3,", "[1, 2], [0, 2, 5], [2], [0, 1, 2, 3,", "[0, 3, 4, 5], [1, 2, 3, 4], [1, 2,", "2, 3], [1, 2, 3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\":", "3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\":", "1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or", "= True # and another profile profile = Profile(5) committeesize", "[0, 2, 3], [1, 2, 3]], \"greedy-monroe\": [[0, 1, 3]],", "2], [0, 1, 2], [0, 1, 2], [0, 1], [3,", "[0, 2, 3, 5], [0, 2, 4, 5], [0, 3,", "\"av\": [[0, 1, 2]], \"sav\": [[0, 1, 3]], \"pav-ilp\": [[0,", "True # and another profile profile = Profile(5) committeesize =", "4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\": [[0, 1, 2,", "3], [1, 3], [1, 4], [2, 4], [2, 5], [2,", "3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0,", "4, 5], [1], [2, 4, 5], [2], [3, 4, 5],", "3], [0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0, 1,", "[[0, 1, 2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\":", "from preferences import Profile self.longMessage = True # and another", "3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\":", "+ \" failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule]", "+ \" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize,", "True # and a third profile profile = Profile(6) committeesize", "another profile profile = Profile(5) committeesize = 3 preflist =", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"phrag\":", "2, 3], [1, 3, 4]], \"revseqcc\": [[0, 1, 3], [0,", "\"cc-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "3, 5], [0, 2, 4, 5]], \"phrag\": [[0, 1, 2,", "True profile = Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3],", "2, 3, 4], [0, 3, 4], [0, 2, 4], [0,", 
"profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule", "} run_test_instance(self, profile, committeesize, tests1) # and now with reversed", "True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1,", "[1, 3], [1, 4], [2, 4], [2, 5], [2, 5]])", "= Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1", "3, 4, 5], [2, 3, 4, 5]], \"revseqpav\": [[0, 1,", "1, 2, 4], [0, 2, 3, 4]], \"sav\": [[0, 1,", "3, 4], [0, 2, 3, 5], [0, 2, 4, 5],", "[1], [2], [3]]) committeesize = 2 for rule in rules_approval.MWRULES.keys():", "rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \"", "3], [0, 2, 3], [1, 2, 3]], \"phrag\": [[0, 1,", "Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with", "[0, 2, 3], [1, 2, 3]], \"phrag\": [[0, 1, 3]],", "[[0, 1, 2, 4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\":", "profile = Profile(5) committeesize = 3 preflist = [[0, 1,", "\"cc-ilp\": [[0, 1, 3], [0, 2, 3], [0, 3, 4],", "4], [0, 2, 3, 4], [0, 2, 3, 5], [0,", "None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]),", "5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0, 1, 2,", "profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import", "3], [1, 4], [2, 4], [2, 5], [2, 5]]) committeesize", "= Profile(5) committeesize = 3 preflist = [[0, 1, 2],", "in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase):", "4, 5]], } run_test_instance(self, profile, committeesize, tests1) # and now", "3, 4], [0, 3, 4], [0, 2, 4], [0, 1]]", "[1, 2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0,", "prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from", "4], [0, 1, 3, 5], [0, 1, 4, 5], [0,", "5], [1, 2, 3, 4], [1, 2, 3, 5]], \"revseqcc\":", "msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile", "= rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule]", "{ \"seqpav\": [[0, 1, 4, 5], [0, 2, 4, 5],", "\"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0,", "4, 5], [2, 3, 4, 5]], \"av\": [[0, 1, 4,", "[0, 2, 4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0,", "5]], \"pav-ilp\": [[0, 1, 4, 5], [0, 2, 4, 5],", "[[0, 1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0, 1,", "result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import", "1, 2, 3]], \"seqcc\": [[0, 1, 2, 4], [0, 1,", "[0, 1, 3], [0, 2, 3]]) # this test shows", "5], [0, 2, 3, 4], [0, 
2, 3, 5], [0,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], }", "5]], \"cc-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "[2, 3, 4, 5]], \"revseqpav\": [[0, 1, 4, 5], [0,", "3], [0, 3, 4], [1, 2, 3], [1, 3, 4]],", "5], [2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize,", "rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in rule: #", "1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]],", "2, 4], [0, 2, 3, 4]], \"sav\": [[0, 1, 2,", "\"cc-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "import rules_approval profile = Profile(5) committeesize = 4 preflist =", "DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None)", "[[0, 1, 3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0, 1,", "\"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0,", "3], [0, 2, 3], [0, 3, 4], [1, 2, 3],", "5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4,", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"monroe-noilp\":", "3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],", "= Profile(6) preflist = [[0, 1], [1], [1, 3], [4],", "= True profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize", "4, 5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0, 1,", "# and another profile profile = Profile(5) committeesize = 3", "3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2,", "profile, committeesize, tests3) def test_monroescore(self): from preferences import Profile from", "[1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile,", "4, 5], [1, 2, 4, 5], [1, 3, 4, 5],", "\" failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual(", "len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue( output[0]", "3, 4], [0, 2, 3, 5], [1, 2, 3, 4],", "self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1,", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-ilp\":", "5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2,", "3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2,", "= 4 preflist = [[0, 1, 2], [1], [1, 2],", "2], [0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2", "6 (even for committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize,", "output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule( rule,", "run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile", "[1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-ilp\": [[0,", "2, 3], [1, 3, 4]], \"seqcc\": [[0, 1, 3], [0,", "1, 3], [0, 2, 3], [1, 2, 3]], \"monroe-noilp\": [[0,", "1, 2], [0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist)", "3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\":", "4, 5]], \"av\": [[0, 1, 4, 5], [0, 2, 4,", "rule == \"greedy-monroe\": # always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule,", "= 3 preflist 
= [[0, 1, 2], [0, 1, 2],", "\"minimaxav-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2 =", "ilp=ilp, resolute=False), [[0, 1, 2], [0, 1, 3], [0, 2,", "from preferences import DichotomousPreferences num_cand = 7 prof = Profile(num_cand)", "self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0], [1, 3],", "5], [1, 2, 4, 5], [1, 3, 4, 5], [2,", "[0, 1, 2, 4], [0, 1, 2, 5], [0, 1,", "3, 5]], \"rule-x\": [[0, 1, 4, 5], [0, 2, 4,", "[1, 3, 4]], \"revseqcc\": [[0, 1, 3], [0, 2, 3],", "profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2],", "4, 5]], \"phragmen-enestroem\": [[0, 1, 4, 5], [0, 2, 4,", "3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\":", "[3]]) committeesize = 2 for rule in rules_approval.MWRULES.keys(): if rule", "5], [0, 2, 4, 5]], \"revseqcc\": [[0, 1, 2, 3],", "5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import", "unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\")", "4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2,", "continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\") for", "3, 5], [0, 2, 4, 5]], \"revseqcc\": [[0, 1, 2,", "[2, 3, 4, 5]], \"sav\": [[0, 1, 2, 3], [0,", "1, 3], [0, 2, 3], [1, 2, 3]], \"minimaxav-ilp\": [[0,", "def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import compute_rule", "profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\")", "and a third profile profile = Profile(6) committeesize = 4", "[[0, 1, 2, 4], [0, 2, 3, 4]], \"sav\": [[0,", "4], [0, 2, 3, 5], [0, 2, 4, 5], [1,", "self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True)", "3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0, 1, 2,", "[0, 1, 2], [0, 1, 2], [0, 1], [3, 4],", "= Profile(5) committeesize = 4 preflist = [[0, 1, 2],", "[1, 2, 3]], \"phrag\": [[0, 1, 3]], \"optphrag\": [[0, 1,", "[0, 2, 3], [0, 3, 4], [1, 2, 3], [1,", "} run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences import", "2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1,", "\"sav\": [[0, 1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0,", "\"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0,", "1, 2, 4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0,", "3], [0, 2, 3], [1, 2, 3]], \"monroe-noilp\": [[0, 1,", "5], [1, 2, 4, 5]], \"cc-noilp\": [[0, 1, 2, 3],", "profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys():", "3, 5], [1, 2, 4, 5]], \"seqcc\": [[0, 1, 2,", "def test_monroe_indivisible(self): from preferences import Profile import rules_approval self.longMessage =", "[1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0,", "5]], \"monroe-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "4]], \"cc-noilp\": [[0, 1, 3], [0, 2, 3], [0, 3,", "3]], \"phragmen-enestroem\": [[0, 1, 3]], } 
run_test_instance(self, profile, committeesize, tests2)", "4, 5], [2, 3, 4, 5]], \"phrag\": [[0, 1, 4,", "preflist = [[0, 1, 2], [0, 1, 2], [0, 1,", "in [True, False]: # max Monroe score is 6 (even", "[[0, 1, 2, 4]], \"av\": [[0, 1, 2, 4], [0,", "3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__ == '__main__':", "import DichotomousPreferences import rules_approval self.longMessage = True profile = Profile(3)", "5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4, 5],", "1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue( output[0] in", "test_mwrules_weightsconsidered(self): from preferences import Profile from preferences import DichotomousPreferences import", "[2, 5], [2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile,", "4]], \"optphrag\": [[0, 1, 2, 3], [0, 1, 2, 4],", "1, 2, 4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\": [[0,", "1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for rule in", "test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True # and", "rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0, 1, 3],", "[1, 3, 4, 5], [2, 3, 4, 5]], } run_test_instance(self,", "5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5,", "3, 4, 5]], \"revseqpav\": [[0, 1, 4, 5], [0, 2,", "4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3 = {", "1, 2, 4]], \"optphrag\": [[0, 1, 2, 3], [0, 1,", "1, 2, 3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\": [[0,", "committeesize = 4 profile = Profile(6) preflist = [[0, 4,", "preferences import Profile from rules_approval import compute_rule self.longMessage = True", "5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3],", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"seqcc\":", "# and a third profile profile = Profile(6) preflist =", "2, 4]], \"av\": [[0, 1, 2, 4], [0, 2, 3,", "3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from", "2], [1], [1, 2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys():", "profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2", "[[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0, 1,", "2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0,", "unit weights: continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in", "self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with", "[[0, 1, 2, 4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\":", "= rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] +", "p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4,", "not (yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences", "3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5], [0, 2,", "= True # and a third profile profile = Profile(6)", "2, 4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0, 1,", "\"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3],", "1, 3, 4], 
[0, 1, 3, 5], [0, 2, 3,", "test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage = True # and", "4, 5], [1, 2], [0, 2, 5], [2], [0, 1,", "2, 3, 5], [0, 2, 4, 5]], \"revseqcc\": [[0, 1,", "1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = {", "preflist = [[0, 1], [1], [1, 3], [4], [2], [1,", "for rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False)", "5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "[2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\":", "= { \"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]],", "2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile", "DichotomousPreferences import rules_approval self.longMessage = True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0]))", "\"rule-x\" in rule: # Monroe and rule x only work", "profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed", "x only work with unit weights: continue result = rules_approval.compute_rule(rule,", "rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] +", "resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with", "preferences import Profile self.longMessage = True committeesize = 4 profile", "[0, 2, 3, 4], [0, 2, 3, 5], [1, 2,", "\"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]],", "1, 2]], \"sav\": [[0, 1, 3]], \"pav-ilp\": [[0, 1, 3]],", "3], [1, 2, 3]], \"phrag\": [[0, 1, 3]], \"optphrag\": [[0,", "3], [1, 2, 3]], \"monroe-noilp\": [[0, 1, 3], [0, 2,", "import Profile self.longMessage = True # and a third profile", "= 4 preflist = [[0, 3, 4, 5], [1, 2],", "preflist = [[0, 4, 5], [0], [1, 4, 5], [1],", "[[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2, 3], [0,", "self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__ == '__main__': unittest.main()", "3, 4], [0, 2, 3, 5], [0, 2, 4, 5]],", "+ \" failed with resolute=True\") def test_monroe_indivisible(self): from preferences import", "import Profile self.longMessage = True committeesize = 4 profile =", "1, 3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]],", "1, 3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from", "self.longMessage = True # and a third profile profile =", "12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage = True", "2], [1], [3]]) committeesize = 3 for ilp in [True,", "[1, 2, 3, 4], [1, 2, 3, 5]], \"seqslav\": [[0,", "4]], \"av\": [[0, 1, 2, 4], [0, 2, 3, 4]],", "with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile", "[[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], }", "[[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for rule", "1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0,", "for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\"", "\"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "import Profile from score_functions import monroescore_flowbased, monroescore_matching self.longMessage = True", "[[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\":", "2, 4], [0, 
2, 3, 4], [0, 2, 3, 5]," ]
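The test fragments above exercise approval-based multiwinner rules (AV, PAV, CC, Monroe, Phragmén, rule X and friends) through calls like rules_approval.compute_rule(rule, profile, committeesize, resolute=...). As a minimal, library-independent sketch of what such a test asserts, the snippet below brute-forces the Approval Voting (AV) committees for a toy profile and checks the result with unittest; the helper av_committees and the toy data are illustrative assumptions, not part of the original suite.

import itertools
import unittest


def av_committees(preferences, num_cand, committeesize):
    """Return all committees of the given size with maximum total approvals (illustrative helper)."""
    best_score = -1
    winners = []
    for committee in itertools.combinations(range(num_cand), committeesize):
        score = sum(len(set(committee) & set(approved)) for approved in preferences)
        if score > best_score:
            best_score, winners = score, [set(committee)]
        elif score == best_score:
            winners.append(set(committee))
    return winners


class TestToyApprovalVoting(unittest.TestCase):
    def test_av_toy_profile(self):
        # Candidates 0 and 1 collect the most approvals, so AV elects {0, 1}.
        preferences = [[0, 1, 2], [0, 1], [0], [1], [2, 3]]
        self.assertEqual(
            av_committees(preferences, num_cand=4, committeesize=2),
            [{0, 1}],
        )


if __name__ == '__main__':
    unittest.main()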
[ "# Solve index reading problem, pandas add 2 index to", "'139807', '139808', '139809', '139810', '139811', '139812' ] # The list", "me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for calculating returns", "month ME into two groups conditions = [ ( portfo_const_df['market_cap']", "] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) #", "11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change order from", "market cap # Concat all 75 tickers' data me_list =", "sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted", "dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create", "7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve", "need for calculating market cap me_months = [ '139312', '139401',", "portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size']", "ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date']", "old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-',", "= merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return =", "mom = ( ((sh_return + bh_return) / 2) - ((sl_return", "= (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion &", "# Calculating value-weighted return for each portfolio in month t", "x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last()", "to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '')", "pandas as pd import numpy as np import jdatetime pd.options.mode.chained_assignment", "== 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist()", "me_months = [ '139312', '139401', '139402', '139403', '139404', '139405', '139406',", "# Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] +", "Calculating value-weighted return for each portfolio in month t #", "= pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date']", "index df for indicating open market days index_path = r'E:\\Thesis\\New", "value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return =", "sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return", "] = t_13_price except: pass # Calculate last 12 months", "= (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl)", "= [0, .3, .7, 1] labels = ['L', 'M', 'H']", "me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True,", "'139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506',", "portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom']", ") bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl =", "b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] ==", 
"pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <=", "merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio =", "# Change order from old to new dates me_df =", "'t-13 price' ] = t_13_price except: pass # Calculate last", "each month ME into two groups conditions = [ (", "merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio =", "merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios", "q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker", "ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month", "== 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist()", "- Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3,", "= (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try:", "'139806', '139807', '139808', '139809', '139810', '139811', '139812' ] # The", "== previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] =", "= index_df['date'].str[:6].unique().tolist() # The list of months that we need", "price condition and t-1 market cap condition previous_month = months[months.index(month)", "'139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605',", "'size', portfolio_size) # Split each me portfolio into 3 MOM", "'139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610',", ") index_df.dropna(inplace=True) # The list of all months months =", "# Read rahavard 365 data for calculating returns close_list =", "'139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712',", "df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date'] = df['date'].apply(", "bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition", "pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The", "= me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days", "sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio", "bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return", "sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add it to", "= merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] #", "Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path,", "[0, .3, .7, 1] labels = ['L', 'M', 'H'] x_b", "3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change order", "monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num',", "'139704', '139705', '139706', '139707', '139708', '139709', '139710', 
'139711', '139712', '139801',", "market cap and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num',", "# Calculate MOM, and add it to a list mom", "= pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S'", "return for month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close']", "rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df", "month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh)", "merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return = np.average(", "Calculate MOM, and add it to a list mom =", "me_list = [] for file_number in range(1, 76): print(file_number) me_path", "np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap )", "all months months = index_df['date'].str[:6].unique().tolist() # The list of months", "['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return']", "Convert to shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d')", "1] labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size']", "Check t-13 price condition and t-1 market cap condition previous_month", "str} ) index_df.dropna(inplace=True) # The list of all months months", "'139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708',", "price', np.nan) for month in mom_months: # Find t-13 prices", "Bourseview data for market cap # Concat all 75 tickers'", "12 months return for month t (t-1, t-12) merged_df['past_year_return'] =", "for market cap # Concat all 75 tickers' data me_list", "each portfolio in month t # Set conditions month_condition =", "# Find t-13 prices for ticker in range(1, 76): t_13", "me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df)", "'139810', '139811', '139812' ] # The list of months that", "13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] ==", "t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price'])", ">= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read", "<= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6,", "# Convert to shamsi dates df['date'] = df['date'].apply( lambda x:", "previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price", "Calculate last 12 months return for month t (t-1, t-12)", "= ( (merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list", "market cap me_months = [ '139312', '139401', '139402', '139403', '139404',", "portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ]", "in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data -", "+ bh_return) / 2) - ((sl_return + bl_return) / 2)", "'139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709',", "= me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) 
me_df.drop(columns='open',", "'139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812' ]", "np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap )", "= (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each", "= None # Read Bourseview data for market cap #", "market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel(", "# Set conditions month_condition = (merged_df['date'] == month) bh_condition =", "& bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition", "df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df =", "'139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510',", "inplace=True) # Read rahavard 365 data for calculating returns close_list", "- Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7],", "portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict()", "usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) #", "dtype={'date': str}, parse_dates=[0] ) # Solve index reading problem, pandas", "# Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index()", "the df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date']", "me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'],", "close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212')", "file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df", "'139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409',", "MOM group q = [0, .3, .7, 1] labels =", "of months that we need for camculating MOM mom_months =", "76): t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date'] ==", "and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) #", "= pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect", "me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df =", "range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\", "mom_months: # Find t-13 prices for ticker in range(1, 76):", "= me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[", "camculating MOM mom_months = me_months[1:] # Merge market cap and", "order from old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date']", "group q = [0, .3, .7, 1] labels = ['L',", "to shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') )", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month in", "'139708', '139709', '139710', '139711', '139712', '139801', '139802', 
'139803', '139804', '139805',", "# Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio =", "'139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505',", "parse_dates=[0] ) # Solve index reading problem, pandas add 2", "range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date']", "df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly", "f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv(", "'139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502',", "'139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808',", "Change order from old to new dates me_df = me_df[::-1].reset_index(drop=True)", "f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path,", "calculating returns close_list = [] for file_number in range(1, 76):", "'139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608',", "we need for camculating MOM mom_months = me_months[1:] # Merge", "names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index", "months that we need for calculating market cap me_months =", "== ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0]", "me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) #", "'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh", "'139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503',", "pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0]", "= pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date']", "me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list,", "= df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date']", "( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B',", "= pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a NaN", "76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx'", "me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df =", "( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() )", "portfolio into 3 MOM group q = [0, .3, .7,", "'139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607',", "'open', 'market_cap'], na_values='-' ) # Change order from old to", "list of all months months = index_df['date'].str[:6].unique().tolist() # The list", "t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13)", "MOM mom_months = me_months[1:] # Merge market cap and price", "pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-'", "str}, parse_dates=[0] ) # Solve index reading problem, pandas add", 
"me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df", "indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df", "list mom = ( ((sh_return + bh_return) / 2) -", "portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL'", "me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') &", "file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >=", "Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2,", "cap and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date'])", "sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition &", "and t-1 market cap condition previous_month = months[months.index(month) - 1]", "a NaN column, and then add t-13 prices merged_df.insert(5, 't-13", "calculating market cap me_months = [ '139312', '139401', '139402', '139403',", "= me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True)", "t # Set conditions month_condition = (merged_df['date'] == month) bh_condition", "= np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap", "# Merge market cap and price dfs merged_df = pd.merge(df,", "ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')]", "monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num',", "portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition &", "'139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard", "merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio", ") # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df =", "pandas add 2 index to the df df.reset_index(drop=True, inplace=True) #", "t-13 prices for ticker in range(1, 76): t_13 = months[months.index(month)", "previous_month = months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month)", "= me_months[1:] # Merge market cap and price dfs merged_df", "bh_return) / 2) - ((sl_return + bl_return) / 2) )", "dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create", "ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh", "import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data for", "# Concat all 75 tickers' data me_list = [] for", "'t-13 price', np.nan) for month in mom_months: # Find t-13", "for calculating returns close_list = [] for file_number in range(1,", "- ((sl_return + bl_return) / 2) ) mom_list.append(mom) mom_df =", "[ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median()", ".7, 1] labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[", "for ticker in range(1, 76): t_13 = months[months.index(month) - 13]", "non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create 
monthly dataframe", "df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df)", "f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date',", "= merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) -", "merged_df.loc[me_condition & mom_condition] # Split each month ME into two", "365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'],", "and then add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for", "me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212')", "= df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df", "me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df", "df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] #", "last 12 months return for month t (t-1, t-12) merged_df['past_year_return']", "mom_months = me_months[1:] # Merge market cap and price dfs", "condition and t-1 market cap condition previous_month = months[months.index(month) -", "(merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition &", "'139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507',", "df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date':", "list of months that we need for camculating MOM mom_months", "na_values='-' ) # Change order from old to new dates", "(merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each month", "add 2 index to the df df.reset_index(drop=True, inplace=True) # Convert", "in mom_months: # Find t-13 prices for ticker in range(1,", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio']", "file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data -", "'date']) # First, create a NaN column, and then add", "& ticker_condition), 't-13 price' ] = t_13_price except: pass #", "Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date',", "= f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel(", "months that we need for camculating MOM mom_months = me_months[1:]", ") mom_list = [] for month in mom_months: # Check", "for calculating market cap me_months = [ '139312', '139401', '139402',", "mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split", "# Split each month ME into two groups conditions =", "portfolio_size) # Split each me portfolio into 3 MOM group", "sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[", "]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl =", "of months that we need for calculating market cap me_months", "Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom']", 
"dtype={'date': str} ) index_df.dropna(inplace=True) # The list of all months", "for camculating MOM mom_months = me_months[1:] # Merge market cap", "# Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index()", "pd.options.mode.chained_assignment = None # Read Bourseview data for market cap", "np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add it", "me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open',", "month in mom_months: # Check t-13 price condition and t-1", "in month t # Set conditions month_condition = (merged_df['date'] ==", "((sh_return + bh_return) / 2) - ((sl_return + bl_return) /", "to a list mom = ( ((sh_return + bh_return) /", "conditions month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition", "None # Read Bourseview data for market cap # Concat", "column, and then add t-13 prices merged_df.insert(5, 't-13 price', np.nan)", "= (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price'", "groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), (", "= [ '139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407',", "'139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707',", "Read Bourseview data for market cap # Concat all 75", ") sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average(", "close_list = [] for file_number in range(1, 76): rahavard_path =", "'139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509',", "new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') #", "me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True)", "t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month)", "t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[", "portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split", "= df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe", "merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl)", "returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average(", "# Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap )", "try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month =", "me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' )", "previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] #", "'139812' ] # The list of months that we need", "'139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710',", "'139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802',", 
"price']) - 1 ) mom_list = [] for month in", "'139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412',", "from old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] =", "t_13_price except: pass # Calculate last 12 months return for", "np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me", "Split each month ME into two groups conditions = [", "= r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'],", "/ 2) - ((sl_return + bl_return) / 2) ) mom_list.append(mom)", "mom_list = [] for month in mom_months: # Check t-13", "== 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio", "= merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio", "Set conditions month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh)", "= me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True)", "[] for file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New", "Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1,", "for month in mom_months: # Find t-13 prices for ticker", "portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio into 3", "'139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512',", "'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True)", "in range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion =", "months months = index_df['date'].str[:6].unique().tolist() # The list of months that", "Merge market cap and price dfs merged_df = pd.merge(df, me_df,", "tickers' data me_list = [] for file_number in range(1, 76):", "portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each month ME", "print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df", "index reading problem, pandas add 2 index to the df", "'139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811',", "reading problem, pandas add 2 index to the df df.reset_index(drop=True,", "(merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price", ".3, .7, 1] labels = ['L', 'M', 'H'] x_b =", "'139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704',", "portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each", "ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion", "# The list of months that we need for camculating", "for month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] /", "Split each me portfolio into 3 MOM group q =", "bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return,", "return for each portfolio in month t # Set conditions", "'139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508',", "= merged_df.loc[me_condition & mom_condition] # Split each month ME into", 
"months = index_df['date'].str[:6].unique().tolist() # The list of months that we", "= pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str},", "market cap condition previous_month = months[months.index(month) - 1] me_condition =", "as np import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview", "df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') &", "ticker_condition), 't-13 price' ] = t_13_price except: pass # Calculate", "/ merged_df['t-13 price']) - 1 ) mom_list = [] for", "'139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806',", "'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index reading", "& sl_condition] # Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return,", "df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change()", "add it to a list mom = ( ((sh_return +", "'139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604',", "= pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] =", "= ( ((sh_return + bh_return) / 2) - ((sl_return +", "weights=sl_portfolio.market_cap ) # Calculate MOM, and add it to a", "'139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602',", "sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return,", "<= '139900')] # Read index df for indicating open market", "'139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for calculating", "r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date':", "index_df.dropna(inplace=True) # The list of all months months = index_df['date'].str[:6].unique().tolist()", "me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df =", "data me_list = [] for file_number in range(1, 76): print(file_number)", "- 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion &", "] # The list of months that we need for", "bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[", "for month in mom_months: # Check t-13 price condition and", "NaN column, and then add t-13 prices merged_df.insert(5, 't-13 price',", "> portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size", "each me portfolio into 3 MOM group q = [0,", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for", "t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) - 1", "= (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition", "Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition", "& sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted", "= pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 
'market_cap'],", "= merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio =", "= months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition", "= np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each", "weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate", "'139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611',", "== month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition =", "'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[", "& bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition", "dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete", "1] me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df", "inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last()", "names=['date', 'open', 'market_cap'], na_values='-' ) # Change order from old", "that we need for calculating market cap me_months = [", "(merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition", "sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) #", "'139811', '139812' ] # The list of months that we", "'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom =", "merged_df['t-13 price']) - 1 ) mom_list = [] for month", "['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio", "merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate", "lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df =", "+ portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist()", "me_df, on=['ticker_num', 'date']) # First, create a NaN column, and", "list of months that we need for calculating market cap", "'139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711',", "== 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist()", "cap me_months = [ '139312', '139401', '139402', '139403', '139404', '139405',", "The list of months that we need for camculating MOM", "= ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B'", "conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap']", "[ '139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408',", "= portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q,", "]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] ==", "- 1 ) mom_list = [] for month in mom_months:", "== previous_month) mom_condition = (merged_df['past_year_return'].notna()) 
portfo_const_df = merged_df.loc[me_condition & mom_condition]", "ME into two groups conditions = [ ( portfo_const_df['market_cap'] >", "# Calculate last 12 months return for month t (t-1,", "Find t-13 prices for ticker in range(1, 76): t_13 =", "= [] for file_number in range(1, 76): print(file_number) me_path =", "header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index reading problem,", "of all months months = index_df['date'].str[:6].unique().tolist() # The list of", "inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df =", "portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[", "= [] for month in mom_months: # Check t-13 price", "ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')]", "for file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily", "]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio in month", "df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df", "== 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] =", "( ((sh_return + bh_return) / 2) - ((sl_return + bl_return)", "Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1,", "me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded", "'139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810',", "portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl", "pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a NaN column,", "labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] ==", ") # Calculate MOM, and add it to a list", "ticker in range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion", "]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size']", "merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a", "merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition", "'139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703',", "]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh =", "bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition]", "index to the df df.reset_index(drop=True, inplace=True) # Convert to shamsi", "2 index to the df df.reset_index(drop=True, inplace=True) # Convert to", "كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} )", "months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition =", "= ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio']", "Solve index reading problem, pandas add 2 index to the", "portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist()", "'139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812' ] #", 
"<reponame>behnoud-bazrafshan/ThesisPortfolio import pandas as pd import numpy as np import", "), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions,", "returns close_list = [] for file_number in range(1, 76): rahavard_path", "shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) #", "= me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df", "me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df =", "df for indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص", "portfolio in month t # Set conditions month_condition = (merged_df['date']", "index_df['date'].str[:6].unique().tolist() # The list of months that we need for", "= [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <=", "= pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) #", "bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition &", "previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month)", "pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date'] <=", "= portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q,", "'139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612',", "'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio into", "me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True)", "'139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807',", "cap # Concat all 75 tickers' data me_list = []", "& (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365", "'139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410',", "jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df", "data for calculating returns close_list = [] for file_number in", "portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] )", "import pandas as pd import numpy as np import jdatetime", "month in mom_months: # Find t-13 prices for ticker in", "'market_cap'], na_values='-' ) # Change order from old to new", "cap condition previous_month = months[months.index(month) - 1] me_condition = (merged_df['date']", "q = [0, .3, .7, 1] labels = ['L', 'M',", "= df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] # Read", "= merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition]", ">= '139212') & (df['date'] <= '139900')] # Read index df", "365 data for calculating returns close_list = [] for file_number", "mom_condition] # Split each month ME into two groups conditions", "( (merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list =", "portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL'", "rahavard 365 
data for calculating returns close_list = [] for", "portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict()", "Concat all 75 tickers' data me_list = [] for file_number", "days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df", "open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df =", "= df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] =", "df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] # Read index", "(t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price except: pass", "= t_13_price except: pass # Calculate last 12 months return", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return", "== 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s =", "= f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df =", "((sl_return + bl_return) / 2) ) mom_list.append(mom) mom_df = pd.Series(mom_list).to_excel('mom.xlsx')", "jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data for market", "file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data", "df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df =", "'139809', '139810', '139811', '139812' ] # The list of months", "]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom))", "'139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411',", "1 ) mom_list = [] for month in mom_months: #", "= np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap", "then add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month", "a list mom = ( ((sh_return + bh_return) / 2)", "[] for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily", "bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return", "'139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706',", "range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\", "'139212') & (df['date'] <= '139900')] # Read index df for", "Read rahavard 365 data for calculating returns close_list = []", "df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date'] =", "'139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603',", "month t # Set conditions month_condition = (merged_df['date'] == month)", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating", "portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH'", "bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = 
merged_df.loc[month_condition & bl_condition]", "'139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705',", "problem, pandas add 2 index to the df df.reset_index(drop=True, inplace=True)", "into 3 MOM group q = [0, .3, .7, 1]", "'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom)", "Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str}", "index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1],", "3 MOM group q = [0, .3, .7, 1] labels", "'139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601',", ") # Change order from old to new dates me_df", "portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH'", "(me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data", "names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list of all", "skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) #", "75 tickers' data me_list = [] for file_number in range(1,", "- 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num']", "inplace=True) # Convert to shamsi dates df['date'] = df['date'].apply( lambda", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio']", "'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date']", "it to a list mom = ( ((sh_return + bh_return)", "pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] = (", "= merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio", "bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return,", "portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size',", "sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition]", "(t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) -", "me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df =", "portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size =", "df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return']", "pass # Calculate last 12 months return for month t", "sl_condition] # Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap", "labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom =", "<= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for", "# The list of months that we need for calculating", "Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, 
skiprows=7, usecols=[2, 3, 11],", "(merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ]", "in mom_months: # Check t-13 price condition and t-1 market", "bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[", "np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap )", "we need for calculating market cap me_months = [ '139312',", "prices for ticker in range(1, 76): t_13 = months[months.index(month) -", "'139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609',", "bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) #", "# First, create a NaN column, and then add t-13", "pd import numpy as np import jdatetime pd.options.mode.chained_assignment = None", "merged_df.insert(5, 't-13 price', np.nan) for month in mom_months: # Find", "np.nan) for month in mom_months: # Find t-13 prices for", "'139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812'", "all 75 tickers' data me_list = [] for file_number in", "== t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price =", "[] for month in mom_months: # Check t-13 price condition", "& mom_condition] # Split each month ME into two groups", "'139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606',", "dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number)", "me_months[1:] # Merge market cap and price dfs merged_df =", "days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path,", "(df['date'] <= '139900')] # Read index df for indicating open", "'139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511',", "pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return']", "'139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501',", "= np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap", "t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month in mom_months:", "create a NaN column, and then add t-13 prices merged_df.insert(5,", "weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return =", "q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom", "bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition &", "'139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701',", "price' ] = t_13_price except: pass # Calculate last 12", "First, create a NaN column, and then add t-13 prices", "'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() #", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "t_13_condtion = (merged_df['date'] == 
t_13) ticker_condition = (merged_df['ticker_num'] == ticker)", "month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition =", "= months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month) mom_condition", "2) - ((sl_return + bl_return) / 2) ) mom_list.append(mom) mom_df", "'139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809',", "Read index df for indicating open market days index_path =", "me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion", "df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list,", "& ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion =", "= [] for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New", "t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion", "= np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add", "index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True)", "Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly", "s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) #", "( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns", "portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio']", "that we need for camculating MOM mom_months = me_months[1:] #", "merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) - 1 )", "= merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition =", "import numpy as np import jdatetime pd.options.mode.chained_assignment = None #", "Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2,", "'139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504',", "data for market cap # Concat all 75 tickers' data", "# Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create", "- 1] me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna())", "'139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803',", "on=['ticker_num', 'date']) # First, create a NaN column, and then", "prices merged_df.insert(5, 't-13 price', np.nan) for month in mom_months: #", "(merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list = []", ") sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM,", "76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt'", "'139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702',", "price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # 
First,", "for indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls'", "t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13", "'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl", "# The list of all months months = index_df['date'].str[:6].unique().tolist() #", "merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1]", ") ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size)", "# Check t-13 price condition and t-1 market cap condition", "t-1 market cap condition previous_month = months[months.index(month) - 1] me_condition", "np import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data", "'139900')] # Read index df for indicating open market days", "for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data", "weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return =", "pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio']", "for each portfolio in month t # Set conditions month_condition", "into two groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median()", "labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers", "f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0,", "value-weighted return for each portfolio in month t # Set", "'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio in", "to the df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates", "t-13 price condition and t-1 market cap condition previous_month =", "merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price except:", "usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list of", "rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] )", "'') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) #", "condition previous_month = months[months.index(month) - 1] me_condition = (merged_df['date'] ==", "x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s,", "mom_months: # Check t-13 price condition and t-1 market cap", "(merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition", ") bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average(", "numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh =", "months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month) mom_condition =", ") # Solve index reading problem, pandas add 2 index", "two groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ),", "= merged_df['ticker_num'].isin(bl) sh_condition = 
merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct", "except: pass # Calculate last 12 months return for month", "1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition),", "The list of months that we need for calculating market", "dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number)", "'139808', '139809', '139810', '139811', '139812' ] # The list of", "index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list", "The list of all months months = index_df['date'].str[:6].unique().tolist() # The", "# Split each me portfolio into 3 MOM group q", "'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom", "Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return", "numpy as np import jdatetime pd.options.mode.chained_assignment = None # Read", "df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >=", "as pd import numpy as np import jdatetime pd.options.mode.chained_assignment =", "# Read Bourseview data for market cap # Concat all", "me portfolio into 3 MOM group q = [0, .3,", "'139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804',", "# Read index df for indicating open market days index_path", "x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b,", "portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s", "sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and", "& (df['date'] <= '139900')] # Read index df for indicating", "months return for month t (t-1, t-12) merged_df['past_year_return'] = (", "and add it to a list mom = ( ((sh_return", "MOM, and add it to a list mom = (", "usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change", "need for camculating MOM mom_months = me_months[1:] # Merge market", "in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard", "month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13", "Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7," ]
[ "calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum not in calcurated_checksum: print('ERROR: checksum", "print('ERROR: checksum is different. download is failed') return False with", "license_key, 'suffix': suffix} data = urllib.parse.urlencode(values) try: urllib.request.urlretrieve( url +", "+ '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO: uploaded {0} to", "' '' + context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId or context.log_stream_name)", "physicalResourceId or context.log_stream_name) response_body['StackId'] = event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId']", "event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] = responseData json_response_body = json.dumps(response_body)", "return err.status print(err) raise Exception('ERROR: http error') except Exception as", "filename in put_files: status = download_file(filename) if status == 401:", "def put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256') as f:", "json.dumps(response_body) print('Response body:\\n' + json_response_body) headers = {'content-type': 'application/json', }", "res = urllib.request.urlopen(req) print('Status code: ' + str(res.status)) except Exception", "if checksum not in calcurated_checksum: print('ERROR: checksum is different. download", "import urllib.request import boto3 # get var from lambda environment", "Reserved.') __version__ = '2.7.1' __license__ = 'MIT-0' __author__ = '<NAME>'", "= 'MIT-0' __author__ = '<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib", "+ '.mmdb' s3obj = s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/'", "__url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json import os import", "= 'None' if event: print(json.dumps(event)) try: for filename in put_files:", "'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json import os import tarfile import", "as f: checksum = f.read().split()[0] print('INFO: Checksum: ' + checksum)", "= 'geoipdb' status = 'None' if event: print(json.dumps(event)) try: for", "url = 'https://download.maxmind.com/app/geoip_download?' put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename):", "urllib.request.urlopen(req) print('Status code: ' + str(res.status)) except Exception as e:", "200 def put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256') as", "hashlib import json import os import tarfile import urllib.error import", "for filename in put_files: status = download_file(filename) if status ==", "+ '/' + filename + '.mmdb' s3obj = s3key_prefix +", "s3 = boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?' put_files", "s3bucket_name = os.environ['s3bucket_name'] license_key = os.environ['license_key'] except KeyError: raise Exception('ERROR:", "not in calcurated_checksum: print('ERROR: checksum is different. 
download is failed')", "filename + '.tar.gz', 'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if", "response_body = {} response_body['Status'] = responseStatus response_body['Reason'] = ('See the", "import hashlib import json import os import tarfile import urllib.error", "+ filename + '.tar.gz', 'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest()", "' + str(res.status)) except Exception as e: print('send(..) failed executing", "event: print(json.dumps(event)) try: for filename in put_files: status = download_file(filename)", "'tar.gz.sha256']: values = {'edition_id': filename, 'license_key': license_key, 'suffix': suffix} data", "False with tarfile.open('/tmp/' + filename + '.tar.gz', 'r:gz') as tf:", "if status == 401: response = {'status': 'invalide_license_key'} else: response", "print('INFO: uploaded {0} to s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj)) def send(event,", "__author__ = '<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json", "print('INFO: ' + filename + ' was downloaded') return 200", "from lambda environment try: s3bucket_name = os.environ['s3bucket_name'] license_key = os.environ['license_key']", "'.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO: uploaded {0} to s3://{1}/{2}'.format(", "in event: response = {'failed_reason': e} send(event, context, 'FAILED', response,", "= s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?' put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country']", "err.status == 401: return err.status print(err) raise Exception('ERROR: http error')", "raise Exception('ERROR: impossible to get lambda environment') s3key_prefix = os.environ.get('s3key_prefix',", "s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3') bucket = s3.Bucket(s3bucket_name)", "affiliates. ' 'All Rights Reserved.') __version__ = '2.7.1' __license__ =", "{'failed_reason': e} send(event, context, 'FAILED', response, physicalResourceId) if event and", "+ '.tar.gz', 'r:gz') as tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb", "except Exception as e: print('send(..) failed executing requests.put(..): ' +", "in put_files: status = download_file(filename) if status == 401: break", "+ '.tar.gz', 'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum", "tf.extractall(path='/tmp/') mmdb = directory + '/' + filename + '.mmdb'", "put_to_s3(filename) except Exception as e: print(e) if event and 'RequestType'", "response_body['Reason'] = ('See the details in CloudWatch Log Stream: '", "Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0", "context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId or context.log_stream_name) response_body['StackId'] = event['StackId']", "filename='/tmp/' + filename + '.' + suffix) except urllib.error.HTTPError as", "its affiliates. 
' 'All Rights Reserved.') __version__ = '2.7.1' __license__", "{'edition_id': filename, 'license_key': license_key, 'suffix': suffix} data = urllib.parse.urlencode(values) try:", "except Exception as e: print(e) if event and 'RequestType' in", "event['ResponseURL'] print(responseUrl) response_body = {} response_body['Status'] = responseStatus response_body['Reason'] =", "CloudWatch Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId'] = (", "= hashlib.sha256(f.read()).hexdigest() if checksum not in calcurated_checksum: print('ERROR: checksum is", "print(json.dumps(event)) try: for filename in put_files: status = download_file(filename) if", "json import os import tarfile import urllib.error import urllib.parse import", "boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?' put_files = ['GeoLite2-City',", "Exception('ERROR: ' + err) print('INFO: ' + filename + '", "err) print('INFO: ' + filename + ' was downloaded') return", "+ '.tar.gz.sha256') as f: checksum = f.read().split()[0] print('INFO: Checksum: '", "tarfile.open('/tmp/' + filename + '.tar.gz', 'r:gz') as tf: directory =", "All Rights Reserved. # SPDX-License-Identifier: MIT-0 __copyright__ = ('Copyright Amazon.com,", "put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256') as f: checksum", "data = urllib.parse.urlencode(values) try: urllib.request.urlretrieve( url + data, filename='/tmp/' +", "__copyright__ = ('Copyright Amazon.com, Inc. or its affiliates. ' 'All", "if event and 'RequestType' in event: response = {'failed_reason': e}", "checksum) with open('/tmp/' + filename + '.tar.gz', 'rb') as f:", "Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId or", "status == 401: break put_to_s3(filename) except Exception as e: print(e)", "= os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url", "+ '.' + suffix) except urllib.error.HTTPError as err: if err.status", "+ str(res.status)) except Exception as e: print('send(..) failed executing requests.put(..):", "as e: print('send(..) failed executing requests.put(..): ' + str(e)) def", "= urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT') try: res = urllib.request.urlopen(req)", "'RequestType' in event: if status == 401: response = {'status':", "as err: if err.status == 401: return err.status print(err) raise", "tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory + '/'", "details in CloudWatch Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId']", "event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] = noEcho", "'2.7.1' __license__ = 'MIT-0' __author__ = '<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service'", "Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier:", "== 401: response = {'status': 'invalide_license_key'} else: response = {'status':", "is failed') return False with tarfile.open('/tmp/' + filename + '.tar.gz',", "'FAILED', response, physicalResourceId) if event and 'RequestType' in event: if", "response, physicalResourceId) if event and 'RequestType' in event: if status", "urllib.error import urllib.parse import urllib.request import boto3 # get var", "Amazon.com, Inc. or its affiliates. 
' 'All Rights Reserved.') __version__", "directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory + '/' +", "license_key = os.environ['license_key'] except KeyError: raise Exception('ERROR: impossible to get", "'r:gz') as tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory", "' 'All Rights Reserved.') __version__ = '2.7.1' __license__ = 'MIT-0'", "with open('/tmp/' + filename + '.tar.gz.sha256') as f: checksum =", "Inc. or its affiliates. ' 'All Rights Reserved.') __version__ =", "os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url =", "Reserved. # SPDX-License-Identifier: MIT-0 __copyright__ = ('Copyright Amazon.com, Inc. or", "or its affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0 __copyright__", "physicalResourceId = 'geoipdb' status = 'None' if event: print(json.dumps(event)) try:", "'.' + suffix) except urllib.error.HTTPError as err: if err.status ==", "'All Rights Reserved.') __version__ = '2.7.1' __license__ = 'MIT-0' __author__", "in ['tar.gz', 'tar.gz.sha256']: values = {'edition_id': filename, 'license_key': license_key, 'suffix':", "as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum not in calcurated_checksum:", "f: checksum = f.read().split()[0] print('INFO: Checksum: ' + checksum) with", "responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl) response_body", "f.read().split()[0] print('INFO: Checksum: ' + checksum) with open('/tmp/' + filename", "tarfile import urllib.error import urllib.parse import urllib.request import boto3 #", "s3obj = s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb,", "('See the details in CloudWatch Log Stream: ' '' +", "checksum not in calcurated_checksum: print('ERROR: checksum is different. 
download is", "context): physicalResourceId = 'geoipdb' status = 'None' if event: print(json.dumps(event))", "status = download_file(filename) if status == 401: break put_to_s3(filename) except", "'license_key': license_key, 'suffix': suffix} data = urllib.parse.urlencode(values) try: urllib.request.urlretrieve( url", "as e: print(e) if event and 'RequestType' in event: response", "and 'RequestType' in event: response = {'failed_reason': e} send(event, context,", "s3bucket_name, s3obj)) def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): #", "['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename): for suffix in ['tar.gz', 'tar.gz.sha256']:", "def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl", "event: response = {'failed_reason': e} send(event, context, 'FAILED', response, physicalResourceId)", "in event: if status == 401: response = {'status': 'invalide_license_key'}", "json_response_body) headers = {'content-type': 'application/json', } req = urllib.request.Request( event['ResponseURL'],", "= directory + '/' + filename + '.mmdb' s3obj =", "'GeoLite2/') s3 = boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?'", "event: if status == 401: response = {'status': 'invalide_license_key'} else:", "+ filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO: uploaded", "e: print('send(..) failed executing requests.put(..): ' + str(e)) def lambda_handler(event,", "urllib.request import boto3 # get var from lambda environment try:", "as tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory +", "req = urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT') try: res =", "Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #", "s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?' 
put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def", "headers = {'content-type': 'application/json', } req = urllib.request.Request( event['ResponseURL'], json_response_body.encode(),", "to s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj)) def send(event, context, responseStatus, responseData,", "was downloaded') return 200 def put_to_s3(filename): with open('/tmp/' + filename", "bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO: uploaded {0} to s3://{1}/{2}'.format( mmdb,", "'<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json import os", "'invalide_license_key'} else: response = {'status': 'downloaded'} send(event, context, 'SUCCESS', response,", "json_response_body = json.dumps(response_body) print('Response body:\\n' + json_response_body) headers = {'content-type':", "str(e)) def lambda_handler(event, context): physicalResourceId = 'geoipdb' status = 'None'", "filename, 'license_key': license_key, 'suffix': suffix} data = urllib.parse.urlencode(values) try: urllib.request.urlretrieve(", "directory + '/' + filename + '.mmdb' s3obj = s3key_prefix", "os.environ['license_key'] except KeyError: raise Exception('ERROR: impossible to get lambda environment')", "response = {'status': 'downloaded'} send(event, context, 'SUCCESS', response, physicalResourceId) return(json.dumps(response))", "physicalResourceId) if event and 'RequestType' in event: if status ==", "print('Response body:\\n' + json_response_body) headers = {'content-type': 'application/json', } req", "Rights Reserved. # SPDX-License-Identifier: MIT-0 __copyright__ = ('Copyright Amazon.com, Inc.", "Rights Reserved.') __version__ = '2.7.1' __license__ = 'MIT-0' __author__ =", "print(responseUrl) response_body = {} response_body['Status'] = responseStatus response_body['Reason'] = ('See", "' + filename + ' was downloaded') return 200 def", "failed') return False with tarfile.open('/tmp/' + filename + '.tar.gz', 'r:gz')", "context, responseStatus, responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL']", "Exception('ERROR: http error') except Exception as err: print(err) raise Exception('ERROR:", "open('/tmp/' + filename + '.tar.gz', 'rb') as f: calcurated_checksum =", "+ filename + '.tar.gz.sha256') as f: checksum = f.read().split()[0] print('INFO:", "except KeyError: raise Exception('ERROR: impossible to get lambda environment') s3key_prefix", "import json import os import tarfile import urllib.error import urllib.parse", "uploaded {0} to s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj)) def send(event, context,", "'MIT-0' __author__ = '<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import", "err: if err.status == 401: return err.status print(err) raise Exception('ERROR:", "filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO: uploaded {0}", "values = {'edition_id': filename, 'license_key': license_key, 'suffix': suffix} data =", "calcurated_checksum: print('ERROR: checksum is different. 
download is failed') return False", "except Exception as err: print(err) raise Exception('ERROR: ' + err)", "json_response_body.encode(), headers=headers, method='PUT') try: res = urllib.request.urlopen(req) print('Status code: '", "lambda_handler(event, context): physicalResourceId = 'geoipdb' status = 'None' if event:", "to get lambda environment') s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3 =", "response_body['StackId'] = event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho']", "var from lambda environment try: s3bucket_name = os.environ['s3bucket_name'] license_key =", "= event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] = responseData json_response_body =", "+ filename + '.mmdb' s3obj = s3key_prefix + filename +", "return False with tarfile.open('/tmp/' + filename + '.tar.gz', 'r:gz') as", "error') except Exception as err: print(err) raise Exception('ERROR: ' +", "+ str(e)) def lambda_handler(event, context): physicalResourceId = 'geoipdb' status =", "} req = urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT') try: res", "or context.log_stream_name) response_body['StackId'] = event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] =", "import os import tarfile import urllib.error import urllib.parse import urllib.request", "return 200 def put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256')", "= f.read().split()[0] print('INFO: Checksum: ' + checksum) with open('/tmp/' +", "mmdb = directory + '/' + filename + '.mmdb' s3obj", "+ ' was downloaded') return 200 def put_to_s3(filename): with open('/tmp/'", "= os.environ['license_key'] except KeyError: raise Exception('ERROR: impossible to get lambda", "'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename): for suffix in ['tar.gz', 'tar.gz.sha256']: values", "hashlib.sha256(f.read()).hexdigest() if checksum not in calcurated_checksum: print('ERROR: checksum is different.", "+ err) print('INFO: ' + filename + ' was downloaded')", "filename + ' was downloaded') return 200 def put_to_s3(filename): with", "= ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename): for suffix in ['tar.gz',", "+ filename + '.tar.gz', 'r:gz') as tf: directory = tf.getmembers()[0].name", "bucket = s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?' put_files = ['GeoLite2-City', 'GeoLite2-ASN',", "except urllib.error.HTTPError as err: if err.status == 401: return err.status", "= {} response_body['Status'] = responseStatus response_body['Reason'] = ('See the details", "== 401: break put_to_s3(filename) except Exception as e: print(e) if", "http error') except Exception as err: print(err) raise Exception('ERROR: '", "response_body['NoEcho'] = noEcho response_body['Data'] = responseData json_response_body = json.dumps(response_body) print('Response", "import urllib.parse import urllib.request import boto3 # get var from", "'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum not in", "Exception as e: print('send(..) 
failed executing requests.put(..): ' + str(e))", "try: for filename in put_files: status = download_file(filename) if status", "print(e) if event and 'RequestType' in event: response = {'failed_reason':", "Exception('ERROR: impossible to get lambda environment') s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/')", "SPDX-License-Identifier: MIT-0 __copyright__ = ('Copyright Amazon.com, Inc. or its affiliates.", "= responseStatus response_body['Reason'] = ('See the details in CloudWatch Log", "' + err) print('INFO: ' + filename + ' was", "__license__ = 'MIT-0' __author__ = '<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import", "response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] = responseData json_response_body", "failed executing requests.put(..): ' + str(e)) def lambda_handler(event, context): physicalResourceId", "'.mmdb' s3obj = s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/' +", "= ('Copyright Amazon.com, Inc. or its affiliates. ' 'All Rights", "filename + '.mmdb' s3obj = s3key_prefix + filename + '.mmdb'", "Exception as err: print(err) raise Exception('ERROR: ' + err) print('INFO:", "or its affiliates. ' 'All Rights Reserved.') __version__ = '2.7.1'", "mmdb, s3bucket_name, s3obj)) def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False):", "{0} to s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj)) def send(event, context, responseStatus,", "__version__ = '2.7.1' __license__ = 'MIT-0' __author__ = '<NAME>' __url__", "os.environ['s3bucket_name'] license_key = os.environ['license_key'] except KeyError: raise Exception('ERROR: impossible to", "= urllib.request.urlopen(req) print('Status code: ' + str(res.status)) except Exception as", "import boto3 # get var from lambda environment try: s3bucket_name", "method='PUT') try: res = urllib.request.urlopen(req) print('Status code: ' + str(res.status))", "= {'status': 'invalide_license_key'} else: response = {'status': 'downloaded'} send(event, context,", "= {'edition_id': filename, 'license_key': license_key, 'suffix': suffix} data = urllib.parse.urlencode(values)", "== 401: return err.status print(err) raise Exception('ERROR: http error') except", "tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory + '/' + filename +", "url + data, filename='/tmp/' + filename + '.' + suffix)", "mmdb, s3obj) print('INFO: uploaded {0} to s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj))", "urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT') try: res = urllib.request.urlopen(req) print('Status", "context, 'FAILED', response, physicalResourceId) if event and 'RequestType' in event:", "if err.status == 401: return err.status print(err) raise Exception('ERROR: http", "urllib.parse.urlencode(values) try: urllib.request.urlretrieve( url + data, filename='/tmp/' + filename +", "Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId", "send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl =", "# SPDX-License-Identifier: MIT-0 __copyright__ = ('Copyright Amazon.com, Inc. 
or its", "put_files: status = download_file(filename) if status == 401: break put_to_s3(filename)", "else: response = {'status': 'downloaded'} send(event, context, 'SUCCESS', response, physicalResourceId)", "'.tar.gz', 'r:gz') as tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb =", "downloaded') return 200 def put_to_s3(filename): with open('/tmp/' + filename +", "suffix} data = urllib.parse.urlencode(values) try: urllib.request.urlretrieve( url + data, filename='/tmp/'", "s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO:", "print('INFO: Checksum: ' + checksum) with open('/tmp/' + filename +", "<reponame>aws-samples/siem-on-amazon-opensearch-service<filename>source/lambda/geoip_downloader/index.py # Copyright Amazon.com, Inc. or its affiliates. All Rights", "os import tarfile import urllib.error import urllib.parse import urllib.request import", "with tarfile.open('/tmp/' + filename + '.tar.gz', 'r:gz') as tf: directory", "= event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] =", "+ checksum) with open('/tmp/' + filename + '.tar.gz', 'rb') as", "err: print(err) raise Exception('ERROR: ' + err) print('INFO: ' +", "responseData json_response_body = json.dumps(response_body) print('Response body:\\n' + json_response_body) headers =", "= s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj)", "try: urllib.request.urlretrieve( url + data, filename='/tmp/' + filename + '.'", "= os.environ['s3bucket_name'] license_key = os.environ['license_key'] except KeyError: raise Exception('ERROR: impossible", "401: break put_to_s3(filename) except Exception as e: print(e) if event", "+ mmdb, s3obj) print('INFO: uploaded {0} to s3://{1}/{2}'.format( mmdb, s3bucket_name,", "in calcurated_checksum: print('ERROR: checksum is different. download is failed') return", "download_file(filename) if status == 401: break put_to_s3(filename) except Exception as", "headers=headers, method='PUT') try: res = urllib.request.urlopen(req) print('Status code: ' +", "lambda environment try: s3bucket_name = os.environ['s3bucket_name'] license_key = os.environ['license_key'] except", "{'status': 'invalide_license_key'} else: response = {'status': 'downloaded'} send(event, context, 'SUCCESS',", "= responseData json_response_body = json.dumps(response_body) print('Response body:\\n' + json_response_body) headers", "('Copyright Amazon.com, Inc. or its affiliates. ' 'All Rights Reserved.')", "print('send(..) failed executing requests.put(..): ' + str(e)) def lambda_handler(event, context):", "data, filename='/tmp/' + filename + '.' + suffix) except urllib.error.HTTPError", "as err: print(err) raise Exception('ERROR: ' + err) print('INFO: '", "context.log_stream_name) response_body['StackId'] = event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId']", "'geoipdb' status = 'None' if event: print(json.dumps(event)) try: for filename", "MIT-0 __copyright__ = ('Copyright Amazon.com, Inc. or its affiliates. 
'", "'suffix': suffix} data = urllib.parse.urlencode(values) try: urllib.request.urlretrieve( url + data,", "get lambda environment') s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3')", "print('Status code: ' + str(res.status)) except Exception as e: print('send(..)", "send(event, context, 'FAILED', response, physicalResourceId) if event and 'RequestType' in", "raise Exception('ERROR: http error') except Exception as err: print(err) raise", "checksum = f.read().split()[0] print('INFO: Checksum: ' + checksum) with open('/tmp/'", "s3obj) print('INFO: uploaded {0} to s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj)) def", "print(err) raise Exception('ERROR: ' + err) print('INFO: ' + filename", "responseStatus response_body['Reason'] = ('See the details in CloudWatch Log Stream:", "break put_to_s3(filename) except Exception as e: print(e) if event and", "suffix in ['tar.gz', 'tar.gz.sha256']: values = {'edition_id': filename, 'license_key': license_key,", "'https://download.maxmind.com/app/geoip_download?' put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename): for suffix", "put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename): for suffix in", "try: res = urllib.request.urlopen(req) print('Status code: ' + str(res.status)) except", "+ context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId or context.log_stream_name) response_body['StackId'] =", "' + checksum) with open('/tmp/' + filename + '.tar.gz', 'rb')", "if event and 'RequestType' in event: if status == 401:", "its affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0 __copyright__ =", "'RequestType' in event: response = {'failed_reason': e} send(event, context, 'FAILED',", "download_file(filename): for suffix in ['tar.gz', 'tar.gz.sha256']: values = {'edition_id': filename,", "( physicalResourceId or context.log_stream_name) response_body['StackId'] = event['StackId'] response_body['RequestId'] = event['RequestId']", "with open('/tmp/' + filename + '.tar.gz', 'rb') as f: calcurated_checksum", "urllib.error.HTTPError as err: if err.status == 401: return err.status print(err)", "if event: print(json.dumps(event)) try: for filename in put_files: status =", "boto3 # get var from lambda environment try: s3bucket_name =", "'None' if event: print(json.dumps(event)) try: for filename in put_files: status", "open('/tmp/' + filename + '.tar.gz.sha256') as f: checksum = f.read().split()[0]", "= '2.7.1' __license__ = 'MIT-0' __author__ = '<NAME>' __url__ =", "filename + '.tar.gz.sha256') as f: checksum = f.read().split()[0] print('INFO: Checksum:", "# get var from lambda environment try: s3bucket_name = os.environ['s3bucket_name']", "raise Exception('ERROR: ' + err) print('INFO: ' + filename +", "= noEcho response_body['Data'] = responseData json_response_body = json.dumps(response_body) print('Response body:\\n'", "{} response_body['Status'] = responseStatus response_body['Reason'] = ('See the details in", "['tar.gz', 'tar.gz.sha256']: values = {'edition_id': filename, 'license_key': license_key, 'suffix': suffix}", "get var from lambda environment try: s3bucket_name = os.environ['s3bucket_name'] license_key", "and 'RequestType' in event: if status == 401: response =", "noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl) response_body = {}", "print(err) raise 
Exception('ERROR: http error') except Exception as err: print(err)", "import tarfile import urllib.error import urllib.parse import urllib.request import boto3", "event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] = responseData", "= ('See the details in CloudWatch Log Stream: ' ''", "# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.", "physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl) response_body =", "= tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory + '/' + filename", "= urllib.parse.urlencode(values) try: urllib.request.urlretrieve( url + data, filename='/tmp/' + filename", "'application/json', } req = urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT') try:", "Exception as e: print(e) if event and 'RequestType' in event:", "= {'content-type': 'application/json', } req = urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers,", "= ( physicalResourceId or context.log_stream_name) response_body['StackId'] = event['StackId'] response_body['RequestId'] =", "filename + '.tar.gz', 'r:gz') as tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/')", "urllib.parse import urllib.request import boto3 # get var from lambda", "different. download is failed') return False with tarfile.open('/tmp/' + filename", "e} send(event, context, 'FAILED', response, physicalResourceId) if event and 'RequestType'", "environment') s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3') bucket =", "suffix) except urllib.error.HTTPError as err: if err.status == 401: return", "in CloudWatch Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId'] =", "' was downloaded') return 200 def put_to_s3(filename): with open('/tmp/' +", "'' + context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId or context.log_stream_name) response_body['StackId']", "'GeoLite2-Country'] def download_file(filename): for suffix in ['tar.gz', 'tar.gz.sha256']: values =", "+ filename + ' was downloaded') return 200 def put_to_s3(filename):", "urllib.request.urlretrieve( url + data, filename='/tmp/' + filename + '.' +", "impossible to get lambda environment') s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3", "+ filename + '.' + suffix) except urllib.error.HTTPError as err:", "f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum not in calcurated_checksum: print('ERROR:", "= {'failed_reason': e} send(event, context, 'FAILED', response, physicalResourceId) if event", "responseUrl = event['ResponseURL'] print(responseUrl) response_body = {} response_body['Status'] = responseStatus", "KeyError: raise Exception('ERROR: impossible to get lambda environment') s3key_prefix =", "response_body['PhysicalResourceId'] = ( physicalResourceId or context.log_stream_name) response_body['StackId'] = event['StackId'] response_body['RequestId']", "+ data, filename='/tmp/' + filename + '.' 
+ suffix) except", "response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data']", "response_body['Status'] = responseStatus response_body['Reason'] = ('See the details in CloudWatch", "'.tar.gz.sha256') as f: checksum = f.read().split()[0] print('INFO: Checksum: ' +", "str(res.status)) except Exception as e: print('send(..) failed executing requests.put(..): '", "s3obj)) def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html", "= boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?' put_files =", "affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0 __copyright__ = ('Copyright", "checksum is different. download is failed') return False with tarfile.open('/tmp/'", "for suffix in ['tar.gz', 'tar.gz.sha256']: values = {'edition_id': filename, 'license_key':", "response = {'status': 'invalide_license_key'} else: response = {'status': 'downloaded'} send(event,", "the details in CloudWatch Log Stream: ' '' + context.log_stream_name)", "= '<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json import", "= event['ResponseURL'] print(responseUrl) response_body = {} response_body['Status'] = responseStatus response_body['Reason']", "Checksum: ' + checksum) with open('/tmp/' + filename + '.tar.gz',", "err.status print(err) raise Exception('ERROR: http error') except Exception as err:", "response_body['Data'] = responseData json_response_body = json.dumps(response_body) print('Response body:\\n' + json_response_body)", "lambda environment') s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3') bucket", "event and 'RequestType' in event: if status == 401: response", "filename + '.' + suffix) except urllib.error.HTTPError as err: if", "e: print(e) if event and 'RequestType' in event: response =", "import urllib.error import urllib.parse import urllib.request import boto3 # get", "environment try: s3bucket_name = os.environ['s3bucket_name'] license_key = os.environ['license_key'] except KeyError:", "is different. 
download is failed') return False with tarfile.open('/tmp/' +", "def lambda_handler(event, context): physicalResourceId = 'geoipdb' status = 'None' if", "'.tar.gz', 'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum not", "try: s3bucket_name = os.environ['s3bucket_name'] license_key = os.environ['license_key'] except KeyError: raise", "401: return err.status print(err) raise Exception('ERROR: http error') except Exception", "s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj)) def send(event, context, responseStatus, responseData, physicalResourceId=None,", "{'content-type': 'application/json', } req = urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT')", "event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT') try: res = urllib.request.urlopen(req) print('Status code:", "if status == 401: break put_to_s3(filename) except Exception as e:", "401: response = {'status': 'invalide_license_key'} else: response = {'status': 'downloaded'}", "= event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] =", "# https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl) response_body = {} response_body['Status']", "= 'https://download.maxmind.com/app/geoip_download?' put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename): for", "= download_file(filename) if status == 401: break put_to_s3(filename) except Exception", "download is failed') return False with tarfile.open('/tmp/' + filename +", "' + str(e)) def lambda_handler(event, context): physicalResourceId = 'geoipdb' status", "= 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json import os import tarfile", "+ suffix) except urllib.error.HTTPError as err: if err.status == 401:", "+ json_response_body) headers = {'content-type': 'application/json', } req = urllib.request.Request(", "def download_file(filename): for suffix in ['tar.gz', 'tar.gz.sha256']: values = {'edition_id':", "= json.dumps(response_body) print('Response body:\\n' + json_response_body) headers = {'content-type': 'application/json',", "responseStatus, responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl)", "'/' + filename + '.mmdb' s3obj = s3key_prefix + filename", "requests.put(..): ' + str(e)) def lambda_handler(event, context): physicalResourceId = 'geoipdb'", "event and 'RequestType' in event: response = {'failed_reason': e} send(event,", "executing requests.put(..): ' + str(e)) def lambda_handler(event, context): physicalResourceId =", "code: ' + str(res.status)) except Exception as e: print('send(..) 
failed", "body:\\n' + json_response_body) headers = {'content-type': 'application/json', } req =", "noEcho response_body['Data'] = responseData json_response_body = json.dumps(response_body) print('Response body:\\n' +", "response = {'failed_reason': e} send(event, context, 'FAILED', response, physicalResourceId) if", "status == 401: response = {'status': 'invalide_license_key'} else: response =", "status = 'None' if event: print(json.dumps(event)) try: for filename in", "https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl) response_body = {} response_body['Status'] =" ]
[ "Y[j] = yRef[j] MInv = MM.invertMatrix(M) for i in range(size):", "2.0 (the \"License\"); # you may not use this file", "'baseline' logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the", "not (len(xRef) == len(yRef)): print(\"Error. Expecting input vectors of same", "master frame def calculateLookAngle(self): lookVector = self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) #", "def baseline(self): #TODO This could be further refactored into a", "pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return def setHeight(self,var):", "lookVector = self.calculateLookVector() az_offset = [] vb = [] hb", "of the difference in position and the \"velocity\" component v_offset", "x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) az_offset.append(z_offset) # Save the", "return normV # Given an orbit and a time, calculate", "calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') v = sv.getVelocity() normV =", "doc = ('Location at which to compute baselines - \"all\"", "method that calls this method #TODO multiple times to calculate", "[(x2[j] - x1[j]) for j in range(len(x1))] # Calculate the", "masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite') slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 =", "if not (size == 3): print(\"Error. Expecting input vectors of", "permissions and # limitations under the License. # # United", "provide the orbits # These provide the range and azimuth", "yRef[j] MInv = MM.invertMatrix(M) for i in range(size): for j", "slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] #", "the horizontal and vertical baseline components by the look angle", "'+ 'To be used in case there is a large", "= self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) # Calculate the look vector of", "spacecraft position def calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') x1 =", "getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def getVBaselineRate(self): return", "[Export] License Required except when exporting to an embargoed country,", "position vector into a unit vector v = MM.normalizeVector(v) #", "addMasterFrame(self): frame = self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame = frame self.startingRange1 = frame.getStartingRange()", "= ('Location at which to compute baselines - \"all\" implies", "= None self.vBaselineRate = None self.vBaselineAcc = None self.pBaselineTop =", "self.pBaselineTop def getPBaselineBottom(self): return self.pBaselineBottom def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def", "def getPBaselineTop(self): return self.pBaselineTop def getPBaselineBottom(self): return self.pBaselineBottom def getOrbSlcAzimuthOffset(self):", "None self.vBaselineTop = None self.vBaselineRate = None self.vBaselineAcc = None", "self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self): return self.lookAngle def", "horizontal and vertical baseline components by the look angle vector", "offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) 
hb.append(c_offset) csb.append(-hb[i]*lookVector[0]", "def polynomialFit(self,xRef,yRef): size = len(xRef) if not (len(xRef) == len(yRef)):", "Populate class attributes self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1] self.hBaselineAcc", "slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()] for i in range(3): # Calculate the", "= self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate the offsets x2 = slaveSV.getPosition()", "the position vector into a unit vector v = MM.normalizeVector(v)", "License for the specific language governing permissions and # limitations", "v = sv.getVelocity() r = MM.normalizeVector(x1) # Turn the position", "az_offset[-1])/2.0 asb_avg = (asb[0] + asb[-1])/2.0 az_offset = (-azb_avg -", "frame.platformHeight except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height =", "= slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) az_offset.append(z_offset) # Save the position", "# Calculate a new start time relativeSlaveTime = slaveTime[i] -", "float(var) return def setRadius(self,radius): self.radius = radius return def setMasterStartingRange(self,range):", "BASELINE_LOCATION = Component.Parameter('baselineLocation', public_name = 'BASELINE_LOCATION', default = 'all', type=str,", "Y = [0]*size A = [0]*size M = [[0 for", "over time. for port in self.inputPorts: port() lookVector = self.calculateLookVector()", "return def setRadius(self,radius): self.radius = radius return def setMasterStartingRange(self,range): self.startingRange1", "image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]", "= MM.normalizeVector(v) # Turn the velocity vector into a unit", "component that is perpendicular to the cross-track direction and position", "= None self.orbSlcRangeOffset = None self.rangeOffset = None self.phaseConst =", "# embargoed foreign country or citizen of those countries. 
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# United States Government Sponsorship acknowledged. This software is subject to
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
# (No [Export] License Required except when exporting to an embargoed country,
# end user, or in support of a prohibited end use). By downloading this software,
# the user agrees to comply with all applicable U.S. export laws and regulations.
# The user has the responsibility to obtain export licenses, or other export
# authority as may be required before exporting this software to any 'EAR99'
# embargoed foreign country or citizen of those countries.
#
# Author: <NAME>
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

import math
import datetime
import logging
from iscesys.Component.Component import Component, Port
from isceobj.Util.mathModule import MathModule as MM
from isceobj.Orbit.Orbit import StateVector


# A class to hold three-dimensional basis vectors
class Basis(object):
    def __init__(self):
        self.x1 = []
        self.x2 = []
        self.x3 = []


# A class to hold three-dimensional basis vectors for spacecraft baselines
class BaselineBasis(Basis):
    def __init__(self):
        Basis.__init__(self)

    def setPositionVector(self,x):
        self.x1 = x

    def getPositionVector(self):
        return self.x1

    def setVelocityVector(self,v):
        self.x2 = v

    def getVelocityVector(self):
        return self.x2

    def setCrossTrackVector(self,c):
        self.x3 = c

    def getCrossTrackVector(self):
        return self.x3

BASELINE_LOCATION = Component.Parameter('baselineLocation',
        public_name = 'BASELINE_LOCATION',
        default = 'all',
        type=str,
        mandatory=False,
        doc = ('Location at which to compute baselines - "all" implies '+
               'top, middle, bottom of master image, '+
               '"top" implies near start of master image, '+
               '"bottom" implies at bottom of master image, '+
               '"middle" implies near middle of master image. '+
               'To be used in case there is a large shift between images.')
        )


class Baseline(Component):

    family = 'baseline'
    logging_name = 'isce.mroipac.baseline'

    parameter_list = (BASELINE_LOCATION,)

    # Calculate the look angle of the master frame
    def calculateLookAngle(self):
        lookVector = self.calculateLookVector()
        return math.degrees(math.atan2(lookVector[1],lookVector[0]))

    # Calculate the look vector of the master frame
    def calculateLookVector(self):
        try:
            z = self.masterFrame.terrainHeight
        except:
            z = 0.0
        cosl = ((self.height-z)*(2*self.radius + self.height + z) +
                self.startingRange1*self.startingRange1)/(2*self.startingRange1*(self.radius + self.height))
        # print('Height: ', self.height)
        # print('Radius: ', self.radius)
        # print('Range: ', self.startingRange1)
        # print('COSL: ', cosl)
        sinl = math.sqrt(1 - cosl*cosl)
        return [cosl,sinl]
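
    # A minimal worked example of the law-of-cosines relation above (not part
    # of the component; the numbers are illustrative assumptions, not real
    # mission values). With platform height h = 700 km, planet radius
    # R = 6371 km, terrain height z = 0, and starting range r = 850 km:
    #
    #   cos(look) = (h*(2*R + h) + r**2) / (2*r*(R + h))
    #             = (700e3*(2*6371e3 + 700e3) + 850e3**2) / (2*850e3*(6371e3 + 700e3))
    #             ~ 0.843  ->  look angle of roughly 32.6 degrees
    #
    # calculateLookVector() returns [cos(look), sin(look)] so that later code
    # can project baselines without recomputing the trigonometry.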

    # Calculate the scalar spacecraft velocity
    def calculateScalarVelocity(self,orbit,time):
        sv = orbit.interpolateOrbit(time, method='hermite')
        v = sv.getVelocity()
        normV = MM.norm(v)
        return normV

    # Given an orbit and a time, calculate an orthogonal basis for the
    # cross-track and velocity directions based on the spacecraft position
    def calculateBasis(self,orbit,time):
        sv = orbit.interpolateOrbit(time, method='hermite')
        x1 = sv.getPosition()
        v = sv.getVelocity()
        # Turn the position vector into a unit vector
        r = MM.normalizeVector(x1)
        # Turn the velocity vector into a unit vector
        v = MM.normalizeVector(v)
        # Calculate the vector perpendicular to the platform position and
        # velocity; this is c, the cross-track vector
        c = MM.crossProduct(r,v)
        c = MM.normalizeVector(c)
        # Calculate the "velocity" component that is perpendicular to the
        # cross-track direction and the position
        v = MM.crossProduct(c,r)

        basis = BaselineBasis()
        basis.setPositionVector(r)
        basis.setVelocityVector(v)
        basis.setCrossTrackVector(c)

        return basis

    # Given two position vectors and a basis, calculate the offset between
    # the two positions expressed in this basis
    def calculateBasisOffset(self,x1,x2,basis):
        # Calculate the difference between the master and slave position vectors
        dx = [(x2[j] - x1[j]) for j in range(len(x1))]
        # Project the position difference onto each basis vector
        z_offset = MM.dotProduct(dx,basis.getVelocityVector())
        v_offset = MM.dotProduct(dx,basis.getPositionVector())
        c_offset = MM.dotProduct(dx,basis.getCrossTrackVector())

        return z_offset,v_offset,c_offset
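
    # A minimal self-contained sketch of the two helpers above using plain
    # Python lists (illustrative only; the component itself relies on
    # MathModule). For r = [1,0,0] and v = [0,1,0]:
    #
    #   c = r x v = [0,0,1]   (cross-track, normal to the orbit plane)
    #   v = c x r = [0,1,0]   (along-track, re-orthogonalized)
    #
    # A position difference dx = [3,4,5] then decomposes as
    #
    #   along-track  = dx . v = 4
    #   radial       = dx . r = 3
    #   cross-track  = dx . c = 5
    #
    # i.e. calculateBasisOffset() is just a change of coordinates into the
    # (radial, along-track, cross-track) frame of the master platform.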

    # Calculate the baseline components between two frames
    def baseline(self):
        #TODO This could be further refactored into a method that calculates
        #TODO the baseline between frames when given a master time and a slave
        #TODO time and a method that calls this method multiple times to
        #TODO calculate the rate of baseline change over time.
        for port in self.inputPorts:
            port()

        lookVector = self.calculateLookVector()
        az_offset = []
        vb = []
        hb = []
        csb = []
        asb = []
        s = [0.,0.,0.]

        if self.baselineLocation.lower() == 'all':
            print('Using entire span of image for estimating baselines')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingStop()]
        elif self.baselineLocation.lower() == 'middle':
            print('Estimating baselines around center of master image')
            masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        elif self.baselineLocation.lower() == 'top':
            print('Estimating baselines at top of master image')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)]
        elif self.baselineLocation.lower() == 'bottom':
            print('Estimating baselines at bottom of master image')
            masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0),
                          self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStop()]
        else:
            raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation))

        slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                     self.slaveFrame.getSensingMid(),
                     self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        # slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()]

        for i in range(3):
            # Calculate the baseline at the start of the scene, mid-scene,
            # and the end of the scene.
            # First, get the position and velocity at the start of the scene
            self.logger.info("Sampling time %s" % i)
            masterBasis = self.calculateBasis(self.masterOrbit,masterTime[i])
            normV = self.calculateScalarVelocity(self.masterOrbit,masterTime[i])

            # Calculate the distance moved since the last baseline point
            if (i > 0):
                deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0])
                s[i] = s[i-1] + deltaT*normV

            masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite')
            slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite')
            x1 = masterSV.getPosition()
            x2 = slaveSV.getPosition()
            (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis)
            az_offset.append(z_offset)  # Save the position offset

            # Calculate a new start time
            relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV))
            slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite')
            # Recalculate the offsets
            x2 = slaveSV.getPosition()
            (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis)

            vb.append(v_offset)
            hb.append(c_offset)
            # Multiply the horizontal and vertical baseline components by the look angle vector
            csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1])
            asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0])

        #Calculating baseline
        crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb)
        verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb)
        h_rate = crossTrackBaselinePolynomialCoefficients[1]

        # Calculate the gross azimuth and range offsets
        azb_avg = (az_offset[0] + az_offset[-1])/2.0
        asb_avg = (asb[0] + asb[-1])/2.0
        az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize)
        r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize)

        # Populate class attributes
        self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0]
        self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1]
        self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2]
        self.vBaselineTop = verticalBaselinePolynomialCoefficients[0]
        self.vBaselineRate = verticalBaselinePolynomialCoefficients[1]
        self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2]
        self.pBaselineTop = csb[0]
        self.pBaselineBottom = csb[-1]
        self.orbSlcAzimuthOffset = az_offset
        self.orbSlcRangeOffset = r_offset
        self.rangeOffset = self.startingRange1 - self.startingRange2
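
    # The three (s, baseline) samples above determine a quadratic model
    #
    #   B(s) = B_top + B_rate*s + B_acc*s**2
    #
    # so hBaselineTop/hBaselineRate/hBaselineAcc (and the v* equivalents) are
    # simply the three polynomial coefficients, with s the along-track
    # distance from the first sampling point. For example, the horizontal
    # baseline at the middle sample is recovered as
    #
    #   hb_mid = self.hBaselineTop + self.hBaselineRate*s[1] + self.hBaselineAcc*s[1]**2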
#", "required by applicable law or agreed to in writing, software", "= crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1] self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop =", "a master time and a slave time and a method", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "the Look Angle of the master frame def calculateLookAngle(self): lookVector", "= None self.hBaselineTop = None self.hBaselineRate = None self.hBaselineAcc =", "+ asb[-1])/2.0 az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1", "the cross-track direction and position basis = BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v)", "self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the distance moved since the last baseline", "agreed to in writing, software # distributed under the License", "into a method that calculates the baseline between #TODO frames", "distributed under the License is distributed on an \"AS IS\"", "vectors of same length.\") raise Exception if not (size ==", "self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStop()] else: raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation))", "self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def", "csb = [] asb = [] s = [0.,0.,0.] if", "<gh_stars>1-10 #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Copyright 2010 California Institute of Technology. ALL", "= frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self,", "ellipsoid.pegRadCur self.height = frame.platformHeight except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius =", "= logging.getLogger('isce.mroipac.baseline') self.createPorts() # Satisfy the old Component self.dictionaryOfOutputVariables =", "getLookAngle(self): return self.lookAngle def _timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds +", "= None self.masterFrame = None self.slaveFrame = None self.lookAngle =", "+ datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines at bottom", "- \"all\" implies '+ 'top, middle, bottom of master image,", "asb_avg)/(self.rangePixelSize) # Populate class attributes self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate =", "+ td.days * 24.0 * 3600) * 10**6) / 10**6", "regulations. 
# The user has the responsibility to obtain export", "= self.startingRange1 - self.startingRange2 # Calculate a quadratic fit to", "spacecraft baselines class BaselineBasis(Basis): def __init__(self): Basis.__init__(self) def setPositionVector(self,x): self.x1", "# Calculate the distance moved since the last baseline point", "start of the scene self.logger.info(\"Sampling time %s\" % i) masterBasis", "self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return", "print('Using entire span of image for estimating baselines') masterTime =", "licenses, or other export # authority as may be required", "M = [[0 for i in range(size) ] for j", "c, or cross-track vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r)", "# Calculate a the \"velocity\" component that is perpendicular to", "Calculate the gross azimuth and range offsets azb_avg = (az_offset[0]", "def __init__(self): Basis.__init__(self) def setPositionVector(self,x): self.x1 = x def getPositionVector(self):", "projection of the difference in position and the \"velocity\" component", "MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components between two", "OR CONDITIONS OF ANY KIND, either express or implied. #", "subject to # U.S. export control laws and regulations and", "limitations under the License. # # United States Government Sponsorship", "the License is distributed on an \"AS IS\" BASIS, #", "= None self.vBaselineTop = None self.vBaselineRate = None self.vBaselineAcc =", "self.descriptionOfVariables = {} self.mandatoryVariables = [] self.optionalVariables = [] return", "sv = orbit.interpolateOrbit(time, method='hermite') x1 = sv.getPosition() v = sv.getVelocity()", "if self.baselineLocation.lower() == 'all': print('Using entire span of image for", "sizes # the two starting ranges, a planet, and the", "Look Angle of the master frame def calculateLookAngle(self): lookVector =", "= self.calculateBasis(self.masterOrbit,masterTime[i]) normV = self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the distance moved", "a slave time and a method that calls this method", "retlst += (self.orbSlcAzimuthOffset,) retstr += \"Bulk Range Offset: %s\\n\" retlst", "components by the look angle vector asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0]) #Calculating", "countries. # # Author: <NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ import math import datetime", "#Calculating baseline crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate =", "which to compute baselines - \"all\" implies '+ 'top, middle,", "of the scene, mid-scene, and the end of the scene", "law or agreed to in writing, software # distributed under", "slave time and a method that calls this method #TODO", "Government Sponsorship acknowledged. 
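
    # Illustrative check of the fit (assumed values, not component state):
    # for samples x = [0, 1, 2] and y = [1, 3, 7] the returned coefficients
    # are A = [1, 1, 1], i.e. y = 1 + x + x**2, which reproduces all three
    # samples exactly. A runnable standalone version of this check appears at
    # the bottom of the module.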

    def setRangePixelSize(self,pixelSize):
        self.rangePixelSize = pixelSize
        return

    def setAzimuthPixelSize(self,pixelSize):
        self.azimuthPixelSize = pixelSize
        return

    def setHeight(self,var):
        self.height = float(var)
        return

    def setRadius(self,radius):
        self.radius = radius
        return

    def setMasterStartingRange(self,range):
        self.startingRange1 = range
        return

    def setSlaveStartingRange(self,range):
        self.startingRange2 = range
        return

    def getHBaselineTop(self):
        return self.hBaselineTop

    def getHBaselineRate(self):
        return self.hBaselineRate

    def getHBaselineAcc(self):
        return self.hBaselineAcc

    def getVBaselineTop(self):
        return self.vBaselineTop

    def getVBaselineRate(self):
        return self.vBaselineRate

    def getVBaselineAcc(self):
        return self.vBaselineAcc

    def getPBaselineTop(self):
        return self.pBaselineTop

    def getPBaselineBottom(self):
        return self.pBaselineBottom

    def getOrbSlcAzimuthOffset(self):
        return self.orbSlcAzimuthOffset

    def getOrbSlcRangeOffset(self):
        return self.orbSlcRangeOffset

    def getRangeOffset(self):
        return self.rangeOffset

    def getPhaseConst(self):
        return self.phaseConst

    def getLookAngle(self):
        return self.lookAngle

    def _timeDeltaToSeconds(self,td):
        return (td.microseconds + (td.seconds + td.days * 24.0 * 3600) * 10**6) / 10**6
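
    # Note: _timeDeltaToSeconds is equivalent to datetime.timedelta.total_seconds(),
    # e.g. _timeDeltaToSeconds(timedelta(days=1, seconds=30)) == 86430.0.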

    def addMasterFrame(self):
        frame = self._inputPorts.getPort(name='masterFrame').getObject()
        self.masterFrame = frame
        self.startingRange1 = frame.getStartingRange()
        prf = frame.getInstrument().getPulseRepetitionFrequency()
        self.rangePixelSize = frame.getInstrument().getRangePixelSize()
        self.masterOrbit = frame.getOrbit()
        midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite')
        self.azimuthPixelSize = midSV.getScalarVelocity()/prf
        try:
            # UAVSAR frame creates ellipsoid with peg
            ellipsoid = frame._ellipsoid
            self.radius = ellipsoid.pegRadCur
            self.height = frame.platformHeight
        except:
            ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp()
            self.radius = ellipsoid.get_a()
            self.height = midSV.calculateHeight(ellipsoid)

    def addSlaveFrame(self):
        frame = self._inputPorts.getPort(name='slaveFrame').getObject()
        self.slaveFrame = frame
        self.startingRange2 = frame.getStartingRange()
        self.slaveOrbit = frame.getOrbit()
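
    # The azimuthPixelSize computed in addMasterFrame() above is the
    # along-track distance covered between successive pulses: with an assumed
    # (illustrative) platform speed of 7500 m/s and a PRF of 1700 Hz it works
    # out to 7500/1700 ~ 4.4 m per azimuth line.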

    def __init__(self, name=''):
        self.masterOrbit = None
        self.slaveOrbit = None
        self.masterFrame = None
        self.slaveFrame = None
        self.lookAngle = None
        self.rangePixelSize = None
        self.azimuthPixelSize = None
        self.height = None
        self.radius = None
        self.startingRange1 = None
        self.startingRange2 = None
        self.hBaselineTop = None
        self.hBaselineRate = None
        self.hBaselineAcc = None
        self.vBaselineTop = None
        self.vBaselineRate = None
        self.vBaselineAcc = None
        self.pBaselineTop = None
        self.pBaselineBottom = None
        self.orbSlcAzimuthOffset = None
        self.orbSlcRangeOffset = None
        self.rangeOffset = None
        self.phaseConst = -99999
        super(Baseline, self).__init__(family=self.__class__.family, name=name)
        self.logger = logging.getLogger('isce.mroipac.baseline')
        self.createPorts()

        # Satisfy the old Component
        self.dictionaryOfOutputVariables = {}
        self.dictionaryOfVariables = {}
        self.descriptionOfVariables = {}
        self.mandatoryVariables = []
        self.optionalVariables = []

        return None

    def createPorts(self):
        # Set input ports.
        # It looks like we really need two orbits, a time, range and azimuth
        # pixel sizes, the two starting ranges, a planet, and the two PRFs.
        # The frame ports provide the orbits as well as the range and azimuth
        # pixel sizes, starting ranges, satellite heights, and times for the
        # first lines.
        masterFramePort = Port(name='masterFrame',method=self.addMasterFrame)
        slaveFramePort = Port(name='slaveFrame',method=self.addSlaveFrame)
        self._inputPorts.add(masterFramePort)
        self._inputPorts.add(slaveFramePort)
        return None
# # United States Government", "baseline point if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i] -", "self.x3 = [] # A class to hold three-dimensional basis", "the difference between the master and slave position vectors z_offset", "the Baseline at the start of the scene, mid-scene, and", "return self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self): return self.lookAngle", "bottom of master image, '+ '\"top\" implies near start of", "def addMasterFrame(self): frame = self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame = frame self.startingRange1 =", "calls this method #TODO multiple times to calculate the rate", "master image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() +", "self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius + self.height) ) # print('Height: ', self.height) #", "return self.x3 BASELINE_LOCATION = Component.Parameter('baselineLocation', public_name = 'BASELINE_LOCATION', default =", "slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) az_offset.append(z_offset) # Save the position offset", "at top of master image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() +", "and velocity directions # based on the spacecraft position def", "rate of baseline change over time. for port in self.inputPorts:", "relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV)) slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') #", "if not (len(xRef) == len(yRef)): print(\"Error. Expecting input vectors of", "Baseline: %s\\n\" retlst = (self.hBaselineTop,) retstr += \"Vertical Baseline: %s\\n\"", "'all': print('Using entire span of image for estimating baselines') masterTime", "into a unit vector c = MM.crossProduct(r,v) # Calculate the", "last baseline point if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i]", "Calculate the Look Angle of the master frame def calculateLookAngle(self):", "of the scene self.logger.info(\"Sampling time %s\" % i) masterBasis =", "asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0]) #Calculating baseline crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients =", "downloading this software, # the user agrees to comply with", "= [0]*size M = [[0 for i in range(size) ]", "the responsibility to obtain export licenses, or other export #", "MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate a the \"velocity\" component", "when exporting to an embargoed country, # end user, or", "old Component self.dictionaryOfOutputVariables = {} self.dictionaryOfVariables = {} self.descriptionOfVariables =", "crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate = crossTrackBaselinePolynomialCoefficients[1] #", "self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom':", "self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset = None self.rangeOffset = None self.phaseConst", "self._inputPorts.add(slaveFramePort) return None def __str__(self): retstr = \"Initial Baseline estimates", "under the License is distributed on an \"AS IS\" BASIS,", "embargoed country, # end user, or 
in support of a", "cross-track vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate", "in range(size)] for j in range(size): for i in range(size):", "at the start of the scene, mid-scene, and the end", "self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop = csb[0] self.pBaselineBottom", "the position offset # Calculate a new start time relativeSlaveTime", "print('Estimating baselines at bottom of master image') masterTime = [self.masterFrame.getSensingStop()", "= self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit =", "self.rangeOffset = self.startingRange1 - self.startingRange2 # Calculate a quadratic fit", "def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def getVBaselineRate(self):", "class attributes self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1] self.hBaselineAcc =", "image for estimating baselines') masterTime = [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower() ==", "return self.pBaselineBottom def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset", "self.mandatoryVariables = [] self.optionalVariables = [] return None def createPorts(self):", "self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate the offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset)", "self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius + self.height) ) #", "return self.x1 def setVelocityVector(self,v): self.x2 = v def getVelocityVector(self): return", "implies near middle of master image. '+ 'To be used", "to # U.S. export control laws and regulations and has", "Baseline at the start of the scene, mid-scene, and the", "position and the \"velocity\" component v_offset = MM.dotProduct(dx,basis.getPositionVector()) c_offset =", "# Calculate the scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time): sv =", "scene, mid-scene, and the end of the scene # First,", "into a unit vector v = MM.normalizeVector(v) # Turn the", "self.startingRange2 = frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''): self.masterOrbit", "if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i]", "[0]*size A = [0]*size M = [[0 for i in", "self.pBaselineBottom = csb[-1] self.orbSlcAzimuthOffset = az_offset self.orbSlcRangeOffset = r_offset self.rangeOffset", "ANY KIND, either express or implied. # See the License", "x1[j]) for j in range(len(x1))] # Calculate the difference between", "self.x1 def setVelocityVector(self,v): self.x2 = v def getVelocityVector(self): return self.x2", "at bottom of master image') masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0),", "the License. 
# You may obtain a copy of the", "to an embargoed country, # end user, or in support", "Offset: %s\\n\" retlst += (self.orbSlcAzimuthOffset,) retstr += \"Bulk Range Offset:", "# See the License for the specific language governing permissions", "regulations and has been classified as 'EAR99 NLR' # (No", "self.startingRange2 = None self.hBaselineTop = None self.hBaselineRate = None self.hBaselineAcc", "= [] # A class to hold three-dimensional basis vectors", "used in case there is a large shift between images.')", "= {} self.descriptionOfVariables = {} self.mandatoryVariables = [] self.optionalVariables =", "setVelocityVector(self,v): self.x2 = v def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c):", "an orbit and a time, calculate an orthogonal basis for", "self.masterOrbit = None self.slaveOrbit = None self.masterFrame = None self.slaveFrame", "# slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()] for i in range(3): # Calculate", "= [[0 for i in range(size) ] for j in", "input ports # It looks like we really need two", "to comply with all applicable U.S. export laws and regulations.", "= None self.pBaselineTop = None self.pBaselineBottom = None self.orbSlcAzimuthOffset =", "def setRadius(self,radius): self.radius = radius return def setMasterStartingRange(self,range): self.startingRange1 =", "Calculate the difference between the master and slave position vectors", "a time, calculate an orthogonal basis for cross-track and velocity", "= MM.normalizeVector(x1) # Turn the position vector into a unit", "== len(yRef)): print(\"Error. Expecting input vectors of same length.\") raise", "scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') v", "= self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate = crossTrackBaselinePolynomialCoefficients[1] # Calculate", "try: z = self.masterFrame.terrainHeight except: z = 0.0 cosl =", "deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i] = s[i-1] + deltaT*normV", "around center of master image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0),", "2*self.startingRange1*(self.radius + self.height) ) # print('Height: ', self.height) # print('Radius:", "that is perpendicular to the cross-track direction and position basis", "orbits, a time, range and azimuth pixel sizes # the", "a the \"velocity\" component that is perpendicular to the cross-track", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "td.days * 24.0 * 3600) * 10**6) / 10**6 def", "writing, software # distributed under the License is distributed on", "+ datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top': print('Estimating baselines at top", "== 'middle': print('Estimating baselines around center of master image') masterTime", "new start time relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV)) slaveSV =", "start time relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV)) slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime,", "def setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self): return self.x3 BASELINE_LOCATION", "implies near start of master image, '+ '\"bottom\" implies at", "None self.phaseConst = -99999 super(Baseline, self).__init__(family=self.__class__.family, name=name) self.logger = 
logging.getLogger('isce.mroipac.baseline')", "logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the Look", "and a method that calls this method #TODO multiple times", "sv = orbit.interpolateOrbit(time, method='hermite') v = sv.getVelocity() normV = MM.norm(v)", "pixelSize return def setHeight(self,var): self.height = float(var) return def setRadius(self,radius):", "U.S. export control laws and regulations and has been classified", "software to any 'EAR99' # embargoed foreign country or citizen", "Range Offset: %s\\n\" retlst += (self.orbSlcRangeOffset,) return retstr % retlst", "starting ranges, a planet, and the two prfs # These", "= ((self.height-z)*(2*self.radius + self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius +", "_timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds + td.days * 24.0 *", "slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 = masterSV.getPosition() x2 = slaveSV.getPosition()", "a unit vector v = MM.normalizeVector(v) # Turn the velocity", "\"all\" implies '+ 'top, middle, bottom of master image, '+", "= r_offset self.rangeOffset = self.startingRange1 - self.startingRange2 # Calculate a", "r = MM.normalizeVector(x1) # Turn the position vector into a", "3600) * 10**6) / 10**6 def addMasterFrame(self): frame = self._inputPorts.getPort(name='masterFrame').getObject()", "24.0 * 3600) * 10**6) / 10**6 def addMasterFrame(self): frame", "\\n\" retstr += \"Cross-track Baseline: %s\\n\" retlst = (self.hBaselineTop,) retstr", "Basis(object): def __init__(self): self.x1 = [] self.x2 = [] self.x3", "obtain export licenses, or other export # authority as may", "unit vector v = MM.normalizeVector(v) # Turn the velocity vector", "%s\\n\" retlst += (self.pBaselineTop,) retstr += \"Bulk Azimuth Offset: %s\\n\"", "# U.S. export control laws and regulations and has been", "the License. # # United States Government Sponsorship acknowledged. This", "size = len(xRef) if not (len(xRef) == len(yRef)): print(\"Error. Expecting", "self.orbSlcRangeOffset = r_offset self.rangeOffset = self.startingRange1 - self.startingRange2 # Calculate", "image. '+ 'To be used in case there is a", "time relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV)) slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite')", "print(\"Error. Expecting input vectors of same length.\") raise Exception if", "position basis = BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis #", "getPositionVector(self): return self.x1 def setVelocityVector(self,v): self.x2 = v def getVelocityVector(self):", "MM.normalizeVector(v) # Turn the velocity vector into a unit vector", "(z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply", "for cross-track and velocity directions # based on the spacecraft", "= BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis # Given two", "and # limitations under the License. 
# # United States", "exporting this software to any 'EAR99' # embargoed foreign country", "Calculate a new start time relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV))", "math import datetime import logging from iscesys.Component.Component import Component, Port", "setMasterStartingRange(self,range): self.startingRange1 = range return def setSlaveStartingRange(self,range): self.startingRange2 = range", "the platform position and velocity, this is the c, or", "a prohibited end use). By downloading this software, # the", "from isceobj.Util.mathModule import MathModule as MM from isceobj.Orbit.Orbit import StateVector", "look angle vector asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0]) #Calculating baseline crossTrackBaselinePolynomialCoefficients =", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "math.pow(xRef[j],i) Y[j] = yRef[j] MInv = MM.invertMatrix(M) for i in", "vb = [] hb = [] csb = [] asb", "self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def getVBaselineRate(self): return self.vBaselineRate def", "position offset # Calculate a new start time relativeSlaveTime =", "# Calculate the look vector of the master frame def", "print('COSL: ', cosl) sinl = math.sqrt(1 - cosl*cosl) return [cosl,sinl]", "return None def createPorts(self): # Set input ports # It", "BaselineBasis(Basis): def __init__(self): Basis.__init__(self) def setPositionVector(self,x): self.x1 = x def", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# Calculate a quadratic fit to the baseline polynomial def", "M[j][i] = math.pow(xRef[j],i) Y[j] = yRef[j] MInv = MM.invertMatrix(M) for", "csb[-1] self.orbSlcAzimuthOffset = az_offset self.orbSlcRangeOffset = r_offset self.rangeOffset = self.startingRange1", "calculate an orthogonal basis for cross-track and velocity directions #", "to any 'EAR99' # embargoed foreign country or citizen of", "self.dictionaryOfVariables = {} self.descriptionOfVariables = {} self.mandatoryVariables = [] self.optionalVariables", "[] # A class to hold three-dimensional basis vectors for", "Technology. ALL RIGHTS RESERVED. # # Licensed under the Apache", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "length.\") raise Exception if not (size == 3): print(\"Error. Expecting", "None self.pBaselineTop = None self.pBaselineBottom = None self.orbSlcAzimuthOffset = None", "responsibility to obtain export licenses, or other export # authority", "self.vBaselineTop = None self.vBaselineRate = None self.vBaselineAcc = None self.pBaselineTop", "It looks like we really need two orbits, a time,", "images.') ) class Baseline(Component): family = 'baseline' logging_name = 'isce.mroipac.baseline'", "= MM.crossProduct(r,v) # Calculate the vector perpendicular to the platform", "return [cosl,sinl] # Calculate the scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time):", "between the master and slave position vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector())", "return self.x2 def setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self): return", "cosl) sinl = math.sqrt(1 - cosl*cosl) return [cosl,sinl] # Calculate", "None self.vBaselineRate = None self.vBaselineAcc = None self.pBaselineTop = None", "use). 
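# ---------------------------------------------------------------------------
# Illustration (not part of the original module): the Baseline component below
# builds an orthonormal (position, velocity, cross-track) triad from a state
# vector (calculateBasis) and projects the master-to-slave separation onto it
# (calculateBasisOffset). This minimal sketch reproduces that geometry with
# plain Python lists and made-up ECEF-like numbers, without the ISCE
# MathModule dependency.
# ---------------------------------------------------------------------------
def _basis_offset_sketch():
    import math

    def norm(a): return math.sqrt(sum(x*x for x in a))

    def unit(a):
        n = norm(a)
        return [x/n for x in a]

    def cross(a, b):
        return [a[1]*b[2] - a[2]*b[1],
                a[2]*b[0] - a[0]*b[2],
                a[0]*b[1] - a[1]*b[0]]

    def dot(a, b): return sum(x*y for x, y in zip(a, b))

    # Made-up master position/velocity and slave position (metres, m/s)
    xm = [7.0e6, 0.0, 0.0]
    vm = [0.0, 7.5e3, 0.0]
    xs = [7.000150e6, 50.0, 200.0]

    r = unit(xm)              # radial (position) unit vector
    v = unit(vm)              # raw along-track unit vector
    c = unit(cross(r, v))     # cross-track unit vector
    v = cross(c, r)           # along-track vector re-orthogonalized against r and c

    dx = [xs[i] - xm[i] for i in range(3)]
    z_offset = dot(dx, v)     # along-track separation (used to re-time the slave)
    v_offset = dot(dx, r)     # radial ("vertical") baseline component
    c_offset = dot(dx, c)     # cross-track ("horizontal") baseline component
    return z_offset, v_offset, c_offset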
class Baseline(Component):

    family = 'baseline'
    logging_name = 'isce.mroipac.baseline'
    parameter_list = (BASELINE_LOCATION,)

    # Calculate the Look Angle of the master frame
    def calculateLookAngle(self):
        lookVector = self.calculateLookVector()
        return math.degrees(math.atan2(lookVector[1], lookVector[0]))

    # Calculate the look vector of the master frame
    def calculateLookVector(self):
        try:
            z = self.masterFrame.terrainHeight
        except:
            z = 0.0
        cosl = ((self.height - z)*(2*self.radius + self.height + z) +
                self.startingRange1*self.startingRange1) / (
                2*self.startingRange1*(self.radius + self.height))
        # print('Height: ', self.height)
        # print('Radius: ', self.radius)
        # print('Range: ', self.startingRange1)
        # print('COSL: ', cosl)
        sinl = math.sqrt(1 - cosl*cosl)
        return [cosl, sinl]

    # Calculate the scalar spacecraft velocity
    def calculateScalarVelocity(self, orbit, time):
        sv = orbit.interpolateOrbit(time, method='hermite')
        v = sv.getVelocity()
        normV = MM.norm(v)
        return normV

    # Given an orbit and a time, calculate an orthogonal basis for cross-track
    # and velocity directions based on the spacecraft position
    def calculateBasis(self, orbit, time):
        sv = orbit.interpolateOrbit(time, method='hermite')
        x1 = sv.getPosition()
        v = sv.getVelocity()
        r = MM.normalizeVector(x1)   # Turn the position vector into a unit vector
        v = MM.normalizeVector(v)    # Turn the velocity vector into a unit vector
        # Calculate the vector perpendicular to the platform position and
        # velocity; this is the c, or cross-track, vector
        c = MM.crossProduct(r, v)
        c = MM.normalizeVector(c)
        # Calculate the "velocity" component that is perpendicular to the
        # cross-track direction and position
        v = MM.crossProduct(c, r)
        basis = BaselineBasis()
        basis.setPositionVector(r)
        basis.setVelocityVector(v)
        basis.setCrossTrackVector(c)
        return basis

    # Given two position vectors and a basis, calculate the offset between the
    # two positions in this basis
    def calculateBasisOffset(self, x1, x2, basis):
        # Calculate the difference between the master and slave position vectors
        dx = [(x2[j] - x1[j]) for j in range(len(x1))]
        # Calculate the length of the projection of the difference in position
        # onto the "velocity", position, and cross-track components
        z_offset = MM.dotProduct(dx, basis.getVelocityVector())
        v_offset = MM.dotProduct(dx, basis.getPositionVector())
        c_offset = MM.dotProduct(dx, basis.getCrossTrackVector())
        return z_offset, v_offset, c_offset

    # Calculate the baseline components between two frames
    def baseline(self):
        #TODO This could be further refactored into a method that calculates the baseline between
        #TODO frames when given a master time and a slave time, and a method that calls this method
        #TODO multiple times to calculate the rate of baseline change over time.
        for port in self.inputPorts:
            port()

        lookVector = self.calculateLookVector()
        az_offset = []
        vb = []
        hb = []
        csb = []
        asb = []
        s = [0., 0., 0.]

        if self.baselineLocation.lower() == 'all':
            print('Using entire span of image for estimating baselines')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingStop()]
        elif self.baselineLocation.lower() == 'middle':
            print('Estimating baselines around center of master image')
            masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        elif self.baselineLocation.lower() == 'top':
            print('Estimating baselines at top of master image')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)]
        elif self.baselineLocation.lower() == 'bottom':
            print('Estimating baselines at bottom of master image')
            masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0),
                          self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStop()]
        else:
            raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation))

        slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                     self.slaveFrame.getSensingMid(),
                     self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        # slaveTime = [self.slaveFrame.getSensingStart(), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingStop()]

        for i in range(3):
            # Calculate the Baseline at the start of the scene, mid-scene, and
            # the end of the scene.
            # First, get the position and velocity at the start of the scene
            self.logger.info("Sampling time %s" % i)
            masterBasis = self.calculateBasis(self.masterOrbit, masterTime[i])
            normV = self.calculateScalarVelocity(self.masterOrbit, masterTime[i])

            # Calculate the distance moved since the last baseline point
            if (i > 0):
                deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0])
                s[i] = s[i-1] + deltaT*normV

            masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite')
            slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite')
            x1 = masterSV.getPosition()
            x2 = slaveSV.getPosition()
            (z_offset, v_offset, c_offset) = self.calculateBasisOffset(x1, x2, masterBasis)
            az_offset.append(z_offset)   # Save the position offset

            # Calculate a new start time
            relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV))
            slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite')

            # Recalculate the offsets
            x2 = slaveSV.getPosition()
            (z_offset, v_offset, c_offset) = self.calculateBasisOffset(x1, x2, masterBasis)
            vb.append(v_offset)
            hb.append(c_offset)
            # Multiply the horizontal and vertical baseline components by the look angle vector
            csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1])
            asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0])

        # Calculating baseline
        crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s, hb)
        verticalBaselinePolynomialCoefficients = self.polynomialFit(s, vb)
        h_rate = crossTrackBaselinePolynomialCoefficients[1]

        # Calculate the gross azimuth and range offsets
        azb_avg = (az_offset[0] + az_offset[-1])/2.0
        asb_avg = (asb[0] + asb[-1])/2.0
        az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize)
        r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize)

        # Populate class attributes
        self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0]
        self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1]
        self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2]
        self.vBaselineTop = verticalBaselinePolynomialCoefficients[0]
        self.vBaselineRate = verticalBaselinePolynomialCoefficients[1]
        self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2]
        self.pBaselineTop = csb[0]
        self.pBaselineBottom = csb[-1]
        self.orbSlcAzimuthOffset = az_offset
        self.orbSlcRangeOffset = r_offset
        self.rangeOffset = self.startingRange1 - self.startingRange2

    # Calculate a quadratic fit to the baseline polynomial
    def polynomialFit(self, xRef, yRef):
        size = len(xRef)
        if not (len(xRef) == len(yRef)):
            print("Error. Expecting input vectors of same length.")
            raise Exception
        if not (size == 3):
            print("Error. Expecting input vectors of length 3.")
            raise Exception
        Y = [0]*size
        A = [0]*size
        M = [[0 for i in range(size)] for j in range(size)]
        for j in range(size):
            for i in range(size):
                M[j][i] = math.pow(xRef[j], i)
            Y[j] = yRef[j]
        MInv = MM.invertMatrix(M)
        for i in range(size):
            for j in range(size):
                A[i] += MInv[i][j]*Y[j]
        return A

    def setRangePixelSize(self, pixelSize):
        self.rangePixelSize = pixelSize
        return

    def setAzimuthPixelSize(self, pixelSize):
        self.azimuthPixelSize = pixelSize
        return

    def setHeight(self, var):
        self.height = float(var)
        return

    def setRadius(self, radius):
        self.radius = radius
        return

    def setMasterStartingRange(self, range):
        self.startingRange1 = range
        return

    def setSlaveStartingRange(self, range):
        self.startingRange2 = range
        return

    def getHBaselineTop(self): return self.hBaselineTop
    def getHBaselineRate(self): return self.hBaselineRate
    def getHBaselineAcc(self): return self.hBaselineAcc
    def getVBaselineTop(self): return self.vBaselineTop
    def getVBaselineRate(self): return self.vBaselineRate
    def getVBaselineAcc(self): return self.vBaselineAcc
    def getPBaselineTop(self): return self.pBaselineTop
    def getPBaselineBottom(self): return self.pBaselineBottom
    def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset
    def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset
    def getRangeOffset(self): return self.rangeOffset
    def getPhaseConst(self): return self.phaseConst
    def getLookAngle(self): return self.lookAngle

    def _timeDeltaToSeconds(self, td):
        return (td.microseconds + (td.seconds + td.days * 24.0 * 3600) * 10**6) / 10**6

    def addMasterFrame(self):
        frame = self._inputPorts.getPort(name='masterFrame').getObject()
        self.masterFrame = frame
        self.startingRange1 = frame.getStartingRange()
        prf = frame.getInstrument().getPulseRepetitionFrequency()
        self.rangePixelSize = frame.getInstrument().getRangePixelSize()
        self.masterOrbit = frame.getOrbit()
        midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite')
        self.azimuthPixelSize = midSV.getScalarVelocity()/prf
        try:
            ellipsoid = frame._ellipsoid   # UAVSAR frame creates ellipsoid with peg
            self.radius = ellipsoid.pegRadCur
            self.height = frame.platformHeight
        except:
            ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp()
            self.radius = ellipsoid.get_a()
            self.height = midSV.calculateHeight(ellipsoid)

    def addSlaveFrame(self):
        frame = self._inputPorts.getPort(name='slaveFrame').getObject()
        self.slaveFrame = frame
        self.startingRange2 = frame.getStartingRange()
        self.slaveOrbit = frame.getOrbit()

    def __init__(self, name=''):
        self.masterOrbit = None
        self.slaveOrbit = None
        self.masterFrame = None
        self.slaveFrame = None
        self.lookAngle = None
        self.rangePixelSize = None
        self.azimuthPixelSize = None
        self.height = None
        self.radius = None
        self.startingRange1 = None
        self.startingRange2 = None
        self.hBaselineTop = None
        self.hBaselineRate = None
        self.hBaselineAcc = None
        self.vBaselineTop = None
        self.vBaselineRate = None
        self.vBaselineAcc = None
        self.pBaselineTop = None
        self.pBaselineBottom = None
        self.orbSlcAzimuthOffset = None
        self.orbSlcRangeOffset = None
        self.rangeOffset = None
        self.phaseConst = -99999
        super(Baseline, self).__init__(family=self.__class__.family, name=name)
        self.logger = logging.getLogger('isce.mroipac.baseline')
        self.createPorts()

        # Satisfy the old Component
        self.dictionaryOfOutputVariables = {}
        self.dictionaryOfVariables = {}
        self.descriptionOfVariables = {}
        self.mandatoryVariables = []
        self.optionalVariables = []
        return None

    def createPorts(self):
        # Set input ports
        # It looks like we really need two orbits, a time, range and azimuth pixel sizes,
        # the two starting ranges, a planet, and the two prfs
        # These provide the orbits
        # These provide the range and azimuth pixel sizes, starting ranges,
        # satellite heights and times for the first lines
        masterFramePort = Port(name='masterFrame', method=self.addMasterFrame)
        slaveFramePort = Port(name='slaveFrame', method=self.addSlaveFrame)
        self._inputPorts.add(masterFramePort)
        self._inputPorts.add(slaveFramePort)
        return None

    def __str__(self):
        retstr = "Initial Baseline estimates \n"
        retstr += "Cross-track Baseline: %s\n"
        retlst = (self.hBaselineTop,)
        retstr += "Vertical Baseline: %s\n"
        retlst += (self.vBaselineTop,)
        retstr += "Perpendicular Baseline: %s\n"
        retlst += (self.pBaselineTop,)
        retstr += "Bulk Azimuth Offset: %s\n"
        retlst += (self.orbSlcAzimuthOffset,)
        retstr += "Bulk Range Offset: %s\n"
        retlst += (self.orbSlcRangeOffset,)
        return retstr % retlst
Expecting input", "+= \"Cross-track Baseline: %s\\n\" retlst = (self.hBaselineTop,) retstr += \"Vertical", "unit vector c = MM.crossProduct(r,v) # Calculate the vector perpendicular", "= None self.phaseConst = -99999 super(Baseline, self).__init__(family=self.__class__.family, name=name) self.logger =", "the baseline components between two frames def baseline(self): #TODO This", "- datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top':", "master image, '+ '\"middle\" implies near middle of master image.", "foreign country or citizen of those countries. # # Author:", "method that calculates the baseline between #TODO frames when given", "vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector()) # Calculate the length of the", "in this basis def calculateBasisOffset(self,x1,x2,basis): dx = [(x2[j] - x1[j])", "self.logger = logging.getLogger('isce.mroipac.baseline') self.createPorts() # Satisfy the old Component self.dictionaryOfOutputVariables", "applicable law or agreed to in writing, software # distributed", "orbits # These provide the range and azimuth pixel sizes,", "', cosl) sinl = math.sqrt(1 - cosl*cosl) return [cosl,sinl] #", "that calls this method #TODO multiple times to calculate the", "radius return def setMasterStartingRange(self,range): self.startingRange1 = range return def setSlaveStartingRange(self,range):", "self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top': print('Estimating baselines", "position def calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') x1 = sv.getPosition()", "self.calculateLookVector() az_offset = [] vb = [] hb = []", "[] vb = [] hb = [] csb = []", "[0]*size M = [[0 for i in range(size) ] for", "slaveFramePort = Port(name='slaveFrame',method=self.addSlaveFrame) self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None def __str__(self): retstr", "refactored into a method that calculates the baseline between #TODO", "These provide the orbits # These provide the range and", "master image') masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0), self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0),", "based on the spacecraft position def calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time,", "getVBaselineRate(self): return self.vBaselineRate def getVBaselineAcc(self): return self.vBaselineAcc def getPBaselineTop(self): return", "MInv = MM.invertMatrix(M) for i in range(size): for j in", "velocity def calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') v = sv.getVelocity()", "self.masterOrbit = frame.getOrbit() midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf", "California Institute of Technology. ALL RIGHTS RESERVED. 
# # Licensed", "Baseline estimates \\n\" retstr += \"Cross-track Baseline: %s\\n\" retlst =", "range(size)] for j in range(size): for i in range(size): M[j][i]", "= slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1])", "# You may obtain a copy of the License at", "elif self.baselineLocation.lower() == 'middle': print('Estimating baselines around center of master", "Calculate the look vector of the master frame def calculateLookVector(self):", "and slave position vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector()) # Calculate the", "= -99999 super(Baseline, self).__init__(family=self.__class__.family, name=name) self.logger = logging.getLogger('isce.mroipac.baseline') self.createPorts() #", "bottom of master image') masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0), self.masterFrame.getSensingStop()", "# Turn the velocity vector into a unit vector c", "def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self):", "def calculateLookAngle(self): lookVector = self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) # Calculate the", "v = sv.getVelocity() normV = MM.norm(v) return normV # Given", "with peg self.radius = ellipsoid.pegRadCur self.height = frame.platformHeight except: ellipsoid", "masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif", "since the last baseline point if (i > 0): deltaT", "= {} self.dictionaryOfVariables = {} self.descriptionOfVariables = {} self.mandatoryVariables =", "baseline crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate = crossTrackBaselinePolynomialCoefficients[1]", "None def createPorts(self): # Set input ports # It looks", "c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components", "length of the projection of the difference in position and", "MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline", "Set input ports # It looks like we really need", "MM.norm(v) return normV # Given an orbit and a time,", "self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal", "to hold three-dimensional basis vectors for spacecraft baselines class BaselineBasis(Basis):", "in range(size) ] for j in range(size)] for j in", "print('Estimating baselines at top of master image') masterTime = [self.masterFrame.getSensingStart(),", "looks like we really need two orbits, a time, range", "- datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] # slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()]", "self.azimuthPixelSize = None self.height = None self.radius = None self.startingRange1", "master image, '+ '\"top\" implies near start of master image,", "= frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height = 
midSV.calculateHeight(ellipsoid) def addSlaveFrame(self):", "return def setSlaveStartingRange(self,range): self.startingRange2 = range return def getHBaselineTop(self): return", "required before exporting this software to any 'EAR99' # embargoed", "look vector of the master frame def calculateLookVector(self): try: z", "midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf try: ellipsoid =", "[self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() ==", "- asb_avg)/(self.rangePixelSize) # Populate class attributes self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate", "\"License\"); # you may not use this file except in", "method='hermite') slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 = masterSV.getPosition() x2 =", "= frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''): self.masterOrbit =", "# The user has the responsibility to obtain export licenses,", "v def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c): self.x3 = c", "vector into a unit vector v = MM.normalizeVector(v) # Turn", "return self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop", "# It looks like we really need two orbits, a", "two starting ranges, a planet, and the two prfs #", "user has the responsibility to obtain export licenses, or other", "setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize", "RIGHTS RESERVED. # # Licensed under the Apache License, Version", "# A class to hold three-dimensional basis vectors for spacecraft", "def addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 =", "'bottom': print('Estimating baselines at bottom of master image') masterTime =", "- cosl*cosl) return [cosl,sinl] # Calculate the scalar spacecraft velocity", "addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 = frame.getStartingRange()", "# These provide the range and azimuth pixel sizes, starting", "the master frame def calculateLookVector(self): try: z = self.masterFrame.terrainHeight except:", "getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset def getRangeOffset(self): return", "# Given two position vectors and a basis, calculate the", "Calculate a quadratic fit to the baseline polynomial def polynomialFit(self,xRef,yRef):" ]
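The quadratic fit above is the heart of the baseline model: polynomialFit() solves a 3x3 Vandermonde system so that the cross-track and vertical baselines can be evaluated as B(s) = B0 + B1*s + B2*s^2 along track. The following standalone sketch is not part of ISCE; numpy stands in for MM.invertMatrix, and the along-track positions and baseline values are made up purely for illustration.

import numpy as np

# Three along-track sample positions (m) and the cross-track baseline (m) at each one
s = [0.0, 3500.0, 7000.0]
hb = [120.0, 118.5, 116.2]

# Vandermonde system M A = Y with M[j][i] = s[j]**i, as in polynomialFit()
M = np.array([[sj**i for i in range(3)] for sj in s])
A = np.linalg.solve(M, np.array(hb))

# A[0], A[1], A[2] play the roles of hBaselineTop, hBaselineRate, hBaselineAcc
print("B(s) = %.3f + %.3e*s + %.3e*s^2" % tuple(A))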
<filename>src/modules/deuces/deck.py
from random import shuffle as rshuffle

from .card import Card


class Deck:
    """
    Class representing a deck. The first time we create, we seed the static
    deck with the list of unique card integers. Each object instantiated simply
    makes a copy of this object and shuffles it.
    """
    _FULL_DECK = []

    def __init__(self):
        self.shuffle()

    def shuffle(self):
        # and then shuffle
        self.cards = Deck.GetFullDeck()
        rshuffle(self.cards)

    def draw(self, n=1):
        if n == 1:
            return self.cards.pop(0)

        cards = []
        for i in range(n):
            cards.append(self.draw())
        return cards

    def __str__(self):
        return Card.print_pretty_cards(self.cards)

    @staticmethod
    def GetFullDeck():
        if Deck._FULL_DECK:
            return list(Deck._FULL_DECK)

        # create the standard 52 card deck
        for rank in Card.STR_RANKS:
            for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items():
                Deck._FULL_DECK.append(Card.new(rank + suit))

        return list(Deck._FULL_DECK)
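A minimal usage sketch for the Deck class above. The import path is inferred from the filename marker and is an assumption, as are the variable names; it assumes the sibling card module provides the Card helpers referenced by Deck.

# Hypothetical usage (import path assumed from src/modules/deuces/deck.py):
from modules.deuces.deck import Deck

deck = Deck()              # freshly shuffled 52-card deck
hole_cards = deck.draw(2)  # draw(n>1) returns a list of packed card integers
board = deck.draw(5)
single = deck.draw()       # draw() with n=1 returns a single card integer
print(deck)                # remaining cards, pretty-printed via Card.print_pretty_cards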
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016, 2017 Red Hat, Inc.
# Red Hat Author: <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Client for applications relying on OpenID Connect for authentication."""
from __future__ import print_function
from copy import copy
import json
import logging
from threading import Lock
import time
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
import socket
import os
try:
    from urllib import urlencode
except ImportError:
    from urllib.parse import urlencode
from uuid import uuid4 as uuidgen
import webbrowser
from wsgiref import simple_server

import requests
import sys

from openidc_client import release

# The ports that we will try to use for our webserver
WEB_PORTS = [12345, 23456]


class OpenIDCClient(object):
    # Internal implementation of tokens:
    #  Every app id has its own token cache
    #  The token cache is a json serialized dict
    #  This dict contains uuid: token pairs
    #  Every "token" object is a json dict with the following keys:
    #   idp: The URL of the idp that issued the token
    #   sub: The subject that owns the token
    #   access_token: Token value
    #   token_type: Token type. Currently supported: "Bearer"
    #   expires_at: Token expiration UTC time. NOTE: Even if the expires_at
    #    indicates the token should still be valid, it may have been revoked by
    #    the user! Also, even if it has expired, we might still be able to
    #    refresh the token.
    #   refresh_token: The token we can use to refresh the access token
    #   scopes: A list of scopes that we had requested with the token

    def __init__(self, app_identifier, id_provider, id_provider_mapping,
                 client_id, client_secret=None, use_post=False, useragent=None,
                 cachedir=None, printfd=sys.stdout):
        """Client for interacting with web services relying on OpenID Connect.

        :param app_identifier: Identifier for storage of retrieved tokens
        :param id_provider: URL of the identity provider to get tokens from
        :param id_provider_mapping: Mapping with URLs to use for specific
            endpoints on the IdP.
        :kwarg use_post: Whether to use POST submission of client secrets
            rather than Authorization header
        :kwarg client_id: The Client Identifier used to request credentials
        :kwarg client_secret: The client "secret" that goes with the client_id.
            May be None if your IdP does not require you to use a secret.
        :kwarg useragent: Useragent string to use. If not provided, defaults to
            "python-openidc-client/VERSION"
        :kwarg cachedir: The directory in which to store the token caches. Will
            be put through expanduser. Default is ~/.openidc. If this does not
            exist and we are unable to create it, the OSError will be thrown.
        :kwarg printfd: The File object to print token instructions to.
        """
        self.logger = logging.getLogger(__name__)
        self.debug = self.logger.debug

        self.app_id = app_identifier
        self.use_post = use_post
        self.idp = id_provider
        self.idp_mapping = id_provider_mapping
        self.client_id = client_id
        self.client_secret = client_secret
        self.useragent = useragent or 'python-openid-client/%s' % \
            release.VERSION
        self.cachedir = os.path.expanduser(cachedir or '~/.openidc')
        self.last_returned_uuid = None
        self.problem_reported = False
        self.token_to_try = None
        self._retrieved_code = None
        # TODO: Make cache_lock a filesystem lock so we also lock across
        # multiple invocations
        self._cache_lock = Lock()
        with self._cache_lock:
            self.__refresh_cache()
        self._valid_cache = []
        self._printfd = printfd

    def get_token(self, scopes, new_token=True):
        """Function to retrieve tokens with specific scopes.

        This function will block until a token is retrieved if requested.
        It is always safe to call this though, since if we already have a
        token with the current app_identifier that has the required scopes,
        we will return it.

        This function will return a bearer token or None.
        Note that the bearer token might have been revoked by the user or
        expired.
        In that case, you will want to call report_token_issue() to try to
        renew the token or delete the token.

        :kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
        :rtype: string or None
        :returns: String bearer token if possible or None
        """
        if not isinstance(scopes, list):
            raise ValueError('Scopes must be a list')
        token = self._get_token_with_scopes(scopes)
        if token:
            # If we had a valid token, use that
            self.last_returned_uuid = token[0]
            self.problem_reported = False
            return token[1]['access_token']
        elif not new_token:
            return None

        # We did not have a valid token, now comes the hard part...
        uuid = self._get_new_token(scopes)
        if uuid:
            self.last_returned_uuid = uuid
            self.problem_reported = False
            return self._cache[uuid]['access_token']

    def report_token_issue(self):
        """Report an error with the last token that was returned.

        This will attempt to renew the token that was last returned.
        If that worked, we will return the new access token.
        If it did not work, we will return None and remove this token from
        the cache.

        If you get an indication from your application that the token was
        invalid, you should call it.
        You should explicitly NOT call this function if the token was valid
        but your request failed due to a server error or because the account
        or token was lacking specific permissions.
        """
        if not self.last_returned_uuid:
            raise Exception('Cannot report issue before requesting token')
        if self.problem_reported:
            # We were reported a problem before. Let's just remove this token.
            self._delete_token(self.last_returned_uuid)
            return None
        refresh_result = self._refresh_token(self.last_returned_uuid)
        if not refresh_result:
            self._delete_token(self.last_returned_uuid)
            return None
        else:
            self.problem_reported = True
            return self._cache[self.last_returned_uuid]['access_token']

    def send_request(self, *args, **kwargs):
        """Make a python-requests POST request.

        All arguments and keyword arguments are like the arguments to
        requests, except for the scopes, new_token and auto_refresh keyword
        arguments. scopes is required.

        :kwarg scopes: Scopes required for this call. If a token is not
            present with these scopes, a new one will be requested unless
            nonblocking is True.
        :kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
        :kwarg auto_refresh: If False, will not attempt to automatically
            report token issues on 401. This helps with broken apps that may
            send a 401 return code in incorrect cases.
        :kwarg http_method: The HTTP method to use, defaults to POST.
        """
        ckwargs = copy(kwargs)

        scopes = ckwargs.pop('scopes')
        new_token = ckwargs.pop('new_token', True)
        auto_refresh = ckwargs.pop('auto_refresh', True)
        method = ckwargs.pop('http_method', 'POST')

        is_retry = False
        if self.token_to_try:
            is_retry = True
            token = self.token_to_try
            self.token_to_try = None
        else:
            token = self.get_token(scopes, new_token=new_token)
            if not token:
                return None

        if self.use_post:
            if 'json' in ckwargs:
                raise ValueError('Cannot provide json in a post call')
            if method not in ['POST']:
                raise ValueError('Cannot use POST tokens in %s method' %
                                 method)

            if 'data' not in ckwargs:
                ckwargs['data'] = {}
            ckwargs['data']['access_token'] = token
        else:
            if 'headers' not in ckwargs:
                ckwargs['headers'] = {}
            ckwargs['headers']['Authorization'] = 'Bearer %s' % token

        resp = requests.request(method, *args, **ckwargs)
        if resp.status_code == 401 and not is_retry:
            if not auto_refresh:
                return resp

            self.token_to_try = self.report_token_issue()
            if not self.token_to_try:
                return resp
            return self.send_request(*args, **kwargs)
        elif resp.status_code == 401:
            # We got a 401 and this is a retry. Report the issue and return
            self.report_token_issue()
            return resp
        else:
            return resp

    @property
    def _cachefile(self):
        """Property to get the cache file name for the current client.

        This assures that whenever this file is touched, the cache lock is
        held.
        """
        assert self._cache_lock.locked()
        return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id)

    def __refresh_cache(self):
        """Refreshes the self._cache.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Refreshing cache')
        if not os.path.isdir(self.cachedir):
            self.debug('Creating directory')
            os.makedirs(self.cachedir)
        if not os.path.exists(self._cachefile):
            self.debug('Creating file')
            with open(self._cachefile, 'w') as f:
                f.write(json.dumps({}))
        with open(self._cachefile, 'r') as f:
            self._cache = json.loads(f.read())

    def _refresh_cache(self):
        """Refreshes the self._cache.

        cache_lock may not be held by anyone."""
        with self._cache_lock:
            self.__refresh_cache()

    def __write_cache(self):
        """Writes self._cache to cache on disk.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Writing %i tokens', len(self._cache))
        with open(self._cachefile, 'w') as f:
            f.write(json.dumps(self._cache))

    def _add_token(self, token):
        """Adds a token to the cache and writes cache to disk.

        cache_lock may not be held by anyone.

        :param token: Dict of the token to be added to the cache
        """
        uuid = uuidgen().hex
        self.debug('Adding token %s to cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            self._cache[uuid] = token
            self.__write_cache()
        return uuid

    def _update_token(self, uuid, toupdate):
        """Updates a token in the cache.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be updated
        :param toupdate: Dict indicating which fields need to be updated
        """
        self.debug('Updating token %s in cache, fields %s', uuid,
                   toupdate.keys())
        with self._cache_lock:
            self.__refresh_cache()
            if uuid not in self._cache:
                return None
            self._cache[uuid].update(toupdate)
            self.__write_cache()
        return uuid

    def _delete_token(self, uuid):
        """Removes a token from the cache.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be removed from cache
        """
        self.debug('Removing token %s from cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            if uuid in self._cache:
                self.debug('Removing token')
                del self._cache[uuid]
                self.__write_cache()
            else:
                self.debug('Token was already gone')

    def _get_token_with_scopes(self, scopes):
        """Searches the cache for any tokens that have the requested scopes.

        It will prefer to return tokens whose expires_at is still before the
        current time, but if no such tokens exist it will return the possibly
        expired token: it might be refreshable.

        :param scopes: List of scopes that need to be in the returned token
        :rtype: (string, dict) or None
        :returns: Token UUID and contents or None if no applicable tokens
            were found
        """
        possible_token = None
        self.debug('Trying to get token with scopes %s', scopes)
        for uuid in self._cache:
            self.debug('Checking %s', uuid)
            token = self._cache[uuid]
            if not set(scopes).issubset(set(token['scopes'])):
                self.debug('Missing scope: %s not subset of %s',
                           set(scopes),
                           set(token['scopes']))
                continue
            if token['expires_at'] > time.time():
                # This is a token that's supposed to still be valid, prefer it
                # over any others we have
                self.debug('Not yet expired, returning')
                return uuid, token
            # This is a token that may or may not still be valid
            self.debug('Possible')
            possible_token = (uuid, token)
        if possible_token:
            self.debug('Returning possible token')
            return possible_token

    def _idp_url(self, method):
        """Returns the IdP URL for the requested method.

        :param method: The method name in the IdP mapping dict.
        :rtype: string
        :returns: The IdP URL
        """
        if method in self.idp_mapping:
            return self.idp + self.idp_mapping[method]
        else:
            return ValueError('Idp Mapping did not include path for %s'
                              % method)

    def _refresh_token(self, uuid):
        """Tries to refresh a token and put the refreshed token in self._cache

        The caller is responsible for either removing the token if it could
        not be refreshed or saving the cache if renewal was successful.

        :param uuid: The UUID of the cached token to attempt to refresh.
        :rtype: bool
        :returns: True if the token was successfully refreshed, False
            otherwise
        """
        oldtoken = self._cache[uuid]
        self.debug('Refreshing token %s', uuid)
        data = {'client_id': self.client_id,
                'grant_type': 'refresh_token',
                'refresh_token': oldtoken['refresh_token']}
        if self.client_secret:
            data['client_secret'] = self.client_secret
        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        resp = resp.json()
        if 'error' in resp:
            self.debug('Unable to refresh, error: %s', resp['error'])
            return False
        self._update_token(
            uuid,
            {'access_token': resp['access_token'],
             'token_type': resp['token_type'],
             'refresh_token': resp['refresh_token'],
             'expires_at': time.time() + resp['expires_in']})
        self.debug('Refreshed until %s', self._cache[uuid]['expires_at'])
        return True

    def _get_server(self, app):
        """This function returns a SimpleServer with an available WEB_PORT."""
        for port in WEB_PORTS:
            try:
                server = simple_server.make_server('0.0.0.0', port, app)
                return server
            except socket.error:
                # This port did not work. Switch to next one
                continue

    def _get_new_token(self, scopes):
        """This function kicks off some magic.

        We will spawn a webserver on one of the WEB_PORTS and then either
        show the user a URL, or if possible, kick off their browser.
        This URL will be the Authorization endpoint of the IdP with a request
        for our client_id to get a new token with the specified scopes.
        The webserver will then need to catch the return with either an
        Authorization Code (that we will exchange for an access token) or the
        cancellation message.

        This function will store the new token in the local cache, add it to
        the valid cache, and then return the UUID.
        If the user cancelled (or we got another error), we will return None.
        """
        def _token_app(environ, start_response):
            query = environ['QUERY_STRING']
            split = query.split('&')
            kv = dict([v.split('=', 1) for v in split])

            if 'error' in kv:
                self.debug('Error code returned: %s (%s)',
                           kv['error'], kv.get('error_description'))
                self._retrieved_code = False
            else:
                self._retrieved_code = kv['code']

            # Just return a message
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return [u'You can close this window and return to the '
                    u'CLI'.encode('ascii')]

        self._retrieved_code = None
        server = self._get_server(_token_app)
        if not server:
            raise Exception('We were unable to instantiate a webserver')
        return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1]

        rquery = {}
        rquery['scope'] = ' '.join(scopes)
        rquery['response_type'] = 'code'
        rquery['client_id'] = self.client_id
        rquery['redirect_uri'] = return_uri
        rquery['response_mode'] = 'query'
        query = urlencode(rquery)
        authz_url = '%s?%s' % (self._idp_url('Authorization'), query)
        print('Please visit %s to grant authorization' % authz_url,
              file=self._printfd)
        webbrowser.open(authz_url)
        server.handle_request()
        server.server_close()

        assert self._retrieved_code is not None
        if self._retrieved_code is False:
            # The user cancelled the request
            self._retrieved_code = None
            return None

        self.debug('We got an authorization code!')
        data = {'client_id': self.client_id,
                'grant_type': 'authorization_code',
                'redirect_uri': return_uri,
                'code': self._retrieved_code}
        if self.client_secret:
            data['client_secret'] = self.client_secret

        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        self._retrieved_code = None
        resp = resp.json()
        if 'error' in resp:
            self.debug('Error exchanging authorization code: %s',
                       resp['error'])
            return None

        token = {'access_token': resp['access_token'],
                 'refresh_token': resp['refresh_token'],
                 'expires_at': time.time() + int(resp['expires_in']),
                 'idp': self.idp,
                 'token_type': resp['token_type'],
                 'scopes': scopes}
        uuid = self._add_token(token)
        return uuid
It will prefer to return tokens whose expires_at", "_idp_url(self, method): \"\"\"Returns the IdP URL for the requested method.", "will start a new webserver on one of the WEB_PORTS,", "kv = dict([v.split('=', 1) for v in split]) if 'error'", "refresh the token. # refresh_token: The token we can use", "or None. Note that the bearer token might have been", "or because the account or token was lacking specific permissions.", "os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None self.problem_reported = False self.token_to_try", "self.token_to_try = self.report_token_issue() if not self.token_to_try: return resp return self.send_request(*args,", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "one continue def _get_new_token(self, scopes): \"\"\"This function kicks off some", "URL of the identity provider to get tokens from :param", "!= self.idp: self.debug('Incorrect idp') continue if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope:", "return None self.debug('We got an authorization code!') data = {'client_id':", "by caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing cache with %i tokens', len(self._cache))", "for `scopes`, `new_token` and `auto_refresh` keyword arguments. `scopes` is required.", "by # the user! Also, even if it has expired,", "# The ports that we will try to use for", "hard part... uuid = self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid", "import urlencode except ImportError: from urllib.parse import urlencode from uuid", "# SOFTWARE. \"\"\"Client for applications relying on OpenID Connect for", "= client_secret self.useragent = useragent or 'python-openid-client/%s' % \\ release.VERSION", "scopes: Scopes required for this call. If a token is", "use_post=False, useragent=None, cachedir=None, printfd=sys.stdout): \"\"\"Client for interacting with web services", "it # over any others we have self.debug('Not yet expired,", "put through expanduer. Default is ~/.openidc. If this does not", "revoked by the user or expired. In that case, you", "of the cached token to attempt to refresh. :rtype: bool", "Hat Author: <NAME> <<EMAIL>> # # Permission is hereby granted,", "on the IdP. :kwarg use_post: Whether to use POST submission", "None if no applicable tokens were found \"\"\" possible_token =", "self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes self._cache to cache on disk.", "\"\"\" self.logger = logging.getLogger(__name__) self.debug = self.logger.debug self.app_id = app_identifier", "= self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None else: self.problem_reported", "possible token') return possible_token def _idp_url(self, method): \"\"\"Returns the IdP", "if self.use_post: if 'json' in ckwargs: raise ValueError('Cannot provide json", "to the CLI'.encode('ascii')] self._retrieved_code = None server = self._get_server(_token_app) if", "if we do not already have on. :kwarg auto_refresh: If", "token pairs # Every \"token\" object is a json dict", "self.__write_cache() else: self.debug('Token was already gone') def _get_token_with_scopes(self, scopes): \"\"\"Searches", "it did not work, we will return None and remove", "such tokens exist it will return the possibly expired token:", "a token is not present with this token, a new", "current client. 
This assures that whenever this file is touched,", "string or None :returns: String bearer token if possible or", "tokens whose expires_at is still before the current time, but", "return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1] rquery = {} rquery['scope'] =", "def send_request(self, *args, **kwargs): \"\"\"Make an python-requests POST request. Allarguments", "send a 401 return code in incorrect cases. :kwargs http_method:", "and return to the CLI'.encode('ascii')] self._retrieved_code = None server =", "need to catch the return with either an Authorization Code", "token from the cache and writes cache to disk. cache_lock", "did not work. Switch to next one continue def _get_new_token(self,", "we will return None. \"\"\" def _token_app(environ, start_response): query =", "return resp return self.send_request(*args, **kwargs) elif resp.status_code == 401: #", "<<EMAIL>> # # Permission is hereby granted, free of charge,", "2016, 2017 Red Hat, Inc. # Red Hat Author: <NAME>", "from threading import Lock import time try: from StringIO import", "= False else: self._retrieved_code = kv['code'] # Just return a", "or None \"\"\" if not isinstance(scopes, list): raise ValueError('Scopes must", "= ' '.join(scopes) rquery['response_type'] = 'code' rquery['client_id'] = self.client_id rquery['redirect_uri']", "elif resp.status_code == 401: # We got a 401 and", "socket.error: # This port did not work. Switch to next", "block until a token is retrieved if requested. It is", "the token should still be valid, it may have been", "scopes): \"\"\"This function kicks off some magic. We will start", "os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file') with open(self._cachefile, 'w') as", "an python-requests POST request. Allarguments and keyword arguments are like", "time.time(): # This is a token that's supposed to still", "request credentials :kwarg client_secret: The client \"secret\" that goes with", "self.problem_reported: # We were reported an issue before. Let's just", "uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() +", "we will actively request the user to get a new", "will be requested unless nonblocking is True. :kwarg new_token: If", "self.__write_cache() return uuid def _update_token(self, uuid, toupdate): \"\"\"Updates a token", "Hat, Inc. # Red Hat Author: <NAME> <<EMAIL>> # #", "valid but your request failed due to a server error", "_refresh_token(self, uuid): \"\"\"Tries to refresh a token and put the", "the Authorization endpoint of the IdP with a request for", "self.cachedir = os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None self.problem_reported =", "client secrets rather than Authorization header :kwarg client_id: The Client", "\"\"\"Tries to refresh a token and put the refreshed token", "scope: %s not subset of %s', set(scopes), set(token['scopes'])) continue if", "not be held by anyone. :param token: UUID of the", "current client. :kwarg new_token: If True, we will actively request", "uuid: The UUID of the cached token to attempt to", "the token caches. Will be put through expanduer. 
Default is", "False self.token_to_try = None self._retrieved_code = None # TODO: Make", "is still before the current time, but if no such", "token # sub: The subject that owns the token #", "get an indication from your application that the token you", "in self._cache: self.debug('Checking %s', uuid) token = self._cache[uuid] if token['idp']", "secrets rather than Authorization header :kwarg client_id: The Client Identifier", "data=data) resp.raise_for_status() self._retrieved_code = None resp = resp.json() if 'error'", "None refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None", "or may not still be valid self.debug('Possible') possible_token = (uuid,", "server: raise Exception('We were unable to instantiate a webserver') return_uri", "the returned token :rtype: (string, dict) or None :returns: Token", "self._idp_url('Token'), data=data) resp.raise_for_status() self._retrieved_code = None resp = resp.json() if", "WEB_PORT.\"\"\" for port in WEB_PORTS: try: server = simple_server.make_server('0.0.0.0', port,", "scopes. It will prefer to return tokens whose expires_at is", "held \"\"\" assert self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def", "will prefer to return tokens whose expires_at is still before", "self._cache_lock: self.__refresh_cache() self._valid_cache = [] self._printfd = printfd def get_token(self,", "refreshable. :param scopes: List of scopes that need to be", "(%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code = kv['code']", "CLI'.encode('ascii')] self._retrieved_code = None server = self._get_server(_token_app) if not server:", "return token[1]['access_token'] elif not new_token: return None # We did", "copy import json import logging from threading import Lock import", "broken apps that may send a 401 return code in", "return self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs): \"\"\"Make an python-requests POST", "not self.last_returned_uuid: raise Exception('Cannot report issue before requesting token') if", "supported: \"Bearer\" # expires_at: Token expiration UTC time. NOTE: Even", "case, you will want to call report_token_issue() to try to", "arguments are like the arguments to requests, except for `scopes`,", "self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid self.problem_reported = False return", "been revoked by # the user! Also, even if it", "%s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code =", "have a token with the current app_identifier that has the", "deal # in the Software without restriction, including without limitation", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "for port in WEB_PORTS: try: server = simple_server.make_server('0.0.0.0', port, app)", "a json serialized dict # This dict contains uuid: token", "part... uuid = self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid self.problem_reported", "notice and this permission notice shall be included in #", "self.debug('Loaded %i tokens', len(self._cache)) def _refresh_cache(self): \"\"\"Refreshes the self._cache from", "renewal was succesful. :param uuid: The UUID of the cached", "in which to store the token caches. 
Will be put", "already gone') def _get_token_with_scopes(self, scopes): \"\"\"Searches the cache for any", "release # The ports that we will try to use", "not be held by anyone. :param uuid: UUID of the", "f.write(json.dumps(self._cache)) def _add_token(self, token): \"\"\"Adds a token to the cache", "If not provided, defaults to \"python-openidc-client/VERSION\" :kwarg cachedir: The directory", "UTC time. NOTE: Even if the expires_at # indicates the", "get tokens from :param id_provider_mapping: Mapping with URLs to use", "file is touched, the cache lock is held \"\"\" assert", "be a list') token = self._get_token_with_scopes(scopes) if token: # If", "by anyone. :param uuid: UUID of the token to be", "# If we had a valid token, use that self.last_returned_uuid", "self.debug('Creating file') with open(self._cachefile, 'w') as f: f.write(json.dumps({})) with open(self._cachefile,", "refreshed, False otherwise \"\"\" oldtoken = self._cache[uuid] self.debug('Refreshing token %s',", "in kv: self.debug('Error code returned: %s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code", "you sent was invalid, you should call it. You should", "self.client_id, 'grant_type': 'refresh_token', 'refresh_token': oldtoken['refresh_token']} if self.client_secret: data['client_secret'] = self.client_secret", "self._get_server(_token_app) if not server: raise Exception('We were unable to instantiate", "to use a secret. :kwarg useragent: Useragent string to use.", "is a token that may or may not still be", "print_function from copy import copy import json import logging from", "that the token you sent was invalid, you should call", "id_provider self.idp_mapping = id_provider_mapping self.client_id = client_id self.client_secret = client_secret", "`scopes` is required. :kwarg scopes: Scopes required for this call.", "%s', self._cache[uuid]['expires_at']) return True def _get_server(self, app): \"\"\"This function returns", "resp = resp.json() if 'error' in resp: self.debug('Error exchanging authorization", "available WEB_PORT.\"\"\" for port in WEB_PORTS: try: server = simple_server.make_server('0.0.0.0',", "defaults to \"python-openidc-client/VERSION\" :kwarg cachedir: The directory in which to", ":kwarg client_secret: The client \"secret\" that goes with the client_id.", "None self.debug('We got an authorization code!') data = {'client_id': self.client_id,", "use, defaults to POST.. \"\"\" ckwargs = copy(kwargs) scopes =", "'r') as f: self._cache = json.loads(f.read()) self.debug('Loaded %i tokens', len(self._cache))", "create it, the OSError will be thrown. :kwargs printfd: The", "# The token cache is a json serialized dict #", "_get_token_with_scopes(self, scopes): \"\"\"Searches the cache for any tokens that have", "self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() resp =", "UUID of the token to be updated :param toupdate: Dict", "an indication from your application that the token you sent", "UUID of the cached token to attempt to refresh. :rtype:", "did not have a valid token, now comes the hard", "This is a token that's supposed to still be valid,", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "File object to print token instructions to. \"\"\" self.logger =", "still before the current time, but if no such tokens", "return None and remove this token from the cache. If", "to automatically report token issues on 401. 
This helps with", "to deal # in the Software without restriction, including without", "['POST']: raise ValueError('Cannot use POST tokens in %s method' %", "possible_token = None self.debug('Trying to get token with scopes %s',", "refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None else:", "URL of the idp that issued the token # sub:", "self._cache from the cache on disk. Requires cache_lock to be", "be refreshed or saving the cache if renewal was succesful.", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "'%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s to grant authorization'", "IdP URL for the requested method. :param method: The method", "in %s method' % method) if 'data' not in ckwargs:", "window and return to the CLI'.encode('ascii')] self._retrieved_code = None server", "print('Please visit %s to grant authorization' % authz_url, file=self._printfd) webbrowser.open(authz_url)", "self.debug('We got an authorization code!') data = {'client_id': self.client_id, 'grant_type':", "class OpenIDCClient(object): # Internal implementation of tokens: # Every app", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "token[1]['access_token'] elif not new_token: return None # We did not", "URL will be the Authorization endpoint of the IdP with", "elif not new_token: return None # We did not have", "if no such tokens exist it will return the possibly", "if not auto_refresh: return resp self.token_to_try = self.report_token_issue() if not", "release.VERSION self.cachedir = os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None self.problem_reported", "new webserver on one of the WEB_PORTS, and then either", "from the cache and writes cache to disk. cache_lock may", "self.idp_mapping = id_provider_mapping self.client_id = client_id self.client_secret = client_secret self.useragent", "cache_lock may not be held by anyone. :param uuid: UUID", "be held by anyone. :param token: Dict of the token", "must be a list') token = self._get_token_with_scopes(scopes) if token: #", "simple_server.make_server('0.0.0.0', port, app) return server except socket.error: # This port", "applicable tokens were found \"\"\" possible_token = None self.debug('Trying to", "start_response): query = environ['QUERY_STRING'] split = query.split('&') kv = dict([v.split('=',", "the last token that was returned. This will attempt to", "\"\"\"This function kicks off some magic. We will start a", "oldtoken = self._cache[uuid] self.debug('Refreshing token %s', uuid) data = {'client_id':", "# Every app id has its own token cache #", "to create it, the OSError will be thrown. :kwargs printfd:", "import json import logging from threading import Lock import time", "of scopes required for the current client. :kwarg new_token: If", "app) return server except socket.error: # This port did not", "either show the user a URL, or if possible, kick", "have been revoked by the user or expired. In that", "be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing cache with %i", "This is a token that may or may not still", "work. Switch to next one continue def _get_new_token(self, scopes): \"\"\"This", "the IdP with a request for our client_id to get", "**kwargs): \"\"\"Make an python-requests POST request. 
Allarguments and keyword arguments", "be updated :param toupdate: Dict indicating which fields need to", "touched, the cache lock is held \"\"\" assert self._cache_lock.locked() return", "refreshed or saving the cache if renewal was succesful. :param", "ckwargs['data'] = {} ckwargs['data']['access_token'] = token else: if 'headers' not", "you get an indication from your application that the token", "by anyone. :param token: UUID of the token to be", "self.last_returned_uuid = uuid self.problem_reported = False return self._cache[uuid]['access_token'] def report_token_issue(self):", "is not None if self._retrieved_code is False: # The user", "May be None if your IdP does not require you", "as f: f.write(json.dumps({})) with open(self._cachefile, 'r') as f: self._cache =", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "to be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Refreshing cache') if", "dict. :rtype: string :returns: The IdP URL \"\"\" if method", "then return the UUID. If the user cancelled (or we", "is False: # The user cancelled the request self._retrieved_code =", "got an authorization code!') data = {'client_id': self.client_id, 'grant_type': 'authorization_code',", "bool :returns: True if the token was succesfully refreshed, False", "interacting with web services relying on OpenID Connect. :param app_identifier:", "or substantial portions of the Software. # # THE SOFTWARE", ":param token: UUID of the token to be updated :param", "self._delete_token(self.last_returned_uuid) return None else: self.problem_reported = True return self._cache[self.last_returned_uuid]['access_token'] def", "an issue before. Let's just remove this token. self._delete_token(self.last_returned_uuid) return", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "= None self._retrieved_code = None # TODO: Make cache_lock a", "toupdate: Dict indicating which fields need to be updated \"\"\"", "NOT call this function if the token was valid but", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "a json dict with the following keys: # idp: The", "can use to refresh the access token # scopes: A", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "%s in cache, fields %s', uuid, toupdate.keys()) with self._cache_lock: self.__refresh_cache()", "the token was valid but your request failed due to", "not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file')", "might be refreshable. :param scopes: List of scopes that need", "assert self._retrieved_code is not None if self._retrieved_code is False: #", "a list') token = self._get_token_with_scopes(scopes) if token: # If we", "this does not exist and we are unable to create", "id has its own token cache # The token cache", "list of scopes that we had requested with the token", "we will exchange for an access token) or the cancellation", "until %s', self._cache[uuid]['expires_at']) return True def _get_server(self, app): \"\"\"This function", "identity provider to get tokens from :param id_provider_mapping: Mapping with", "HTTP method to use, defaults to POST.. \"\"\" ckwargs =", "issued the token # sub: The subject that owns the", "self.__write_cache() return uuid def _delete_token(self, uuid): \"\"\"Removes a token from", "token') return possible_token def _idp_url(self, method): \"\"\"Returns the IdP URL", ":param token: Dict of the token to be added to", "disk. cache_lock may not be held by anyone. 
:param token:", "if not token: return None if self.use_post: if 'json' in", "the access token # scopes: A list of scopes that", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "the required scopes, we will return it. This function will", "= 'Bearer %s' % token resp = requests.request(method, *args, **ckwargs)", "mapping dict. :rtype: string :returns: The IdP URL \"\"\" if", "so, subject to the following conditions: # # The above", "# Every \"token\" object is a json dict with the", "token in the cache. cache_lock may not be held by", "token['expires_at'] < time.time(): # This is a token that's supposed", "if not self.last_returned_uuid: raise Exception('Cannot report issue before requesting token')", "401. This helps with broken apps that may send a", "to be updated :param toupdate: Dict indicating which fields need", "is True. :kwarg new_token: If True, we will actively request", "a retry. Report error self.report_token_issue() return resp else: return resp", "a new webserver on one of the WEB_PORTS, and then", "return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report an error with the last", "return a message start_response('200 OK', [('Content-Type', 'text/plain')]) return [u'You can", "self.logger = logging.getLogger(__name__) self.debug = self.logger.debug self.app_id = app_identifier self.use_post", "a SimpleServer with an available WEB_PORT.\"\"\" for port in WEB_PORTS:", "ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh', True) method = ckwargs.pop('http_method', 'POST')", "\"\"\" possible_token = None self.debug('Trying to get token with scopes", "resp['token_type'], 'scopes': scopes} # AND WE ARE DONE! \\o/ return", "copy(kwargs) scopes = ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh =", "value # token_type: Token type. Currently supported: \"Bearer\" # expires_at:", "the following conditions: # # The above copyright notice and", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "subset of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] < time.time():", "try: from urllib import urlencode except ImportError: from urllib.parse import", "string to use. If not provided, defaults to \"python-openidc-client/VERSION\" :kwarg", "with the last token that was returned. This will attempt", "set(scopes), set(token['scopes'])) continue if token['expires_at'] < time.time(): # This is", "the self._cache from the cache on disk. cache_lock may not", "StringIO except ImportError: from io import StringIO import socket import", "we have self.debug('Not yet expired, returning') return uuid, token #", "'scopes': scopes} # AND WE ARE DONE! \\o/ return self._add_token(token)", "Scopes required for this call. If a token is not", "error self.report_token_issue() return resp else: return resp @property def _cachefile(self):", "the cache for any tokens that have the requested scopes.", "to store the token caches. Will be put through expanduer.", "the cached token to attempt to refresh. :rtype: bool :returns:", "uuid, toupdate): \"\"\"Updates a token in the cache. cache_lock may", "in # all copies or substantial portions of the Software.", "def _cachefile(self): \"\"\"Property to get the cache file name for", "cache \"\"\" uuid = uuidgen().hex self.debug('Adding token %s to cache',", "= id_provider self.idp_mapping = id_provider_mapping self.client_id = client_id self.client_secret =", "nonblocking is True. 
:kwarg new_token: If True, we will actively", "services relying on OpenID Connect. :param app_identifier: Identifier for storage", "Red Hat, Inc. # Red Hat Author: <NAME> <<EMAIL>> #", "resp.json() if 'error' in resp: self.debug('Unable to refresh, error: %s',", "if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope: %s not subset of %s',", "with the token def __init__(self, app_identifier, id_provider, id_provider_mapping, client_id, client_secret=None,", "caller is responsible for either removing the token if it", "associated documentation files (the \"Software\"), to deal # in the", "in the cache. cache_lock may not be held by anyone.", "delete the token. :kwarg scopes: A list of scopes required", "as uuidgen import webbrowser from wsgiref import simple_server import requests", "with the client_id. May be None if your IdP does", "function if the token was valid but your request failed", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "to any person obtaining a copy # of this software", "or None :returns: Token UUID and contents or None if", "This function will store the new token in the local", "of client secrets rather than Authorization header :kwarg client_id: The", "be the Authorization endpoint of the IdP with a request", "self.logger.debug self.app_id = app_identifier self.use_post = use_post self.idp = id_provider", "wsgiref import simple_server import requests import sys from openidc_client import", "succesful. :param uuid: The UUID of the cached token to", "of the Software, and to permit persons to whom the", "'refresh_token': resp['refresh_token'], 'expires_at': time.time() + int(resp['expires_in']), 'idp': self.idp, 'token_type': resp['token_type'],", "cache on disk. cache_lock may not be held by anyone.\"\"\"", "self._cache_lock: self.__refresh_cache() if uuid in self._cache: self.debug('Removing token') del self._cache[uuid]", "refreshed token in self._cache The caller is responsible for either", "was returned. This will attempt to renew the token that", "client_id, client_secret=None, use_post=False, useragent=None, cachedir=None, printfd=sys.stdout): \"\"\"Client for interacting with", "Mapping with URLs to use for specific endpoints on the", "another error), we will return None. \"\"\" def _token_app(environ, start_response):", "the identity provider to get tokens from :param id_provider_mapping: Mapping", "new one will be requested unless nonblocking is True. :kwarg", "token to be added to the cache \"\"\" uuid =", "token that's supposed to still be valid, prefer it #", "this call. If a token is not present with this", "endpoints on the IdP. :kwarg use_post: Whether to use POST", "urllib import urlencode except ImportError: from urllib.parse import urlencode from", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "None self.problem_reported = False self.token_to_try = None self._retrieved_code = None", "WEB_PORTS, and then either show the user a URL, or", "< time.time(): # This is a token that's supposed to", "dict) or None :returns: Token UUID and contents or None", "set(token['scopes'])) continue if token['expires_at'] < time.time(): # This is a", "sublicense, and/or sell # copies of the Software, and to", ":kwarg client_id: The Client Identifier used to request credentials :kwarg", "'data' not in ckwargs: ckwargs['data'] = {} ckwargs['data']['access_token'] = token", "not require you to use a secret. 
:kwarg useragent: Useragent", "will not try to automatically report token issues on 401.", "because the account or token was lacking specific permissions. \"\"\"", "to use. If not provided, defaults to \"python-openidc-client/VERSION\" :kwarg cachedir:", "tokens from :param id_provider_mapping: Mapping with URLs to use for", "then need to catch the return with either an Authorization", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "to disk. cache_lock may not be held by anyone. :param", "in the Software without restriction, including without limitation the rights", "show the user a URL, or if possible, kick off", "% (self._idp_url('Authorization'), query) print('Please visit %s to grant authorization' %", "scopes, new_token=True): \"\"\"Function to retrieve tokens with specific scopes. This", "that need to be in the returned token :rtype: (string,", "grant authorization' % authz_url, file=self._printfd) webbrowser.open(authz_url) server.handle_request() server.server_close() assert self._retrieved_code", "have on. :kwarg auto_refresh: If False, will not try to", "either an Authorization Code (that we will exchange for an", "= None self.problem_reported = False self.token_to_try = None self._retrieved_code =", "and then return the UUID. If the user cancelled (or", "We got a 401 and this is a retry. Report", "might still be able to # refresh the token. #", "valid token, now comes the hard part... uuid = self._get_new_token(scopes)", "Requires cache_lock to be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Refreshing", "return a bearer token or None. Note that the bearer", "23456] class OpenIDCClient(object): # Internal implementation of tokens: # Every", "use to refresh the access token # scopes: A list", "send_request(self, *args, **kwargs): \"\"\"Make an python-requests POST request. Allarguments and", "be held by anyone.\"\"\" with self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes", "not is_retry: if not auto_refresh: return resp self.token_to_try = self.report_token_issue()", "tokens', len(self._cache)) def _refresh_cache(self): \"\"\"Refreshes the self._cache from the cache", "found \"\"\" possible_token = None self.debug('Trying to get token with", "including without limitation the rights # to use, copy, modify,", "of the identity provider to get tokens from :param id_provider_mapping:", "= logging.getLogger(__name__) self.debug = self.logger.debug self.app_id = app_identifier self.use_post =", "continue if token['expires_at'] < time.time(): # This is a token", "{'client_id': self.client_id, 'grant_type': 'authorization_code', 'redirect_uri': return_uri, 'code': self._retrieved_code} if self.client_secret:", "resp.status_code == 401 and not is_retry: if not auto_refresh: return", "keys: # idp: The URL of the idp that issued", "is_retry = False if self.token_to_try: is_retry = True token =", "specific permissions. \"\"\" if not self.last_returned_uuid: raise Exception('Cannot report issue", "webserver on one of the WEB_PORTS, and then either show", "will then need to catch the return with either an", "that the bearer token might have been revoked by the", "cache is a json serialized dict # This dict contains", "be thrown. :kwargs printfd: The File object to print token", "except for `scopes`, `new_token` and `auto_refresh` keyword arguments. 
`scopes` is", "that we will try to use for our webserver WEB_PORTS", "your request failed due to a server error or because", "with %i tokens', len(self._cache)) with open(self._cachefile, 'w') as f: f.write(json.dumps(self._cache))", "ImportError: from io import StringIO import socket import os try:", "self.use_post: if 'json' in ckwargs: raise ValueError('Cannot provide json in", "with the specified scopes. The webserver will then need to", "json import logging from threading import Lock import time try:", "%s', resp['error']) return None token = {'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'],", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "retrieved if requested. It is always safe to call this", "now comes the hard part... uuid = self._get_new_token(scopes) if uuid:", "return the possibly expired token: it might be refreshable. :param", "uuid) data = {'client_id': self.client_id, 'grant_type': 'refresh_token', 'refresh_token': oldtoken['refresh_token']} if", "{} rquery['scope'] = ' '.join(scopes) rquery['response_type'] = 'code' rquery['client_id'] =", "contents or None if no applicable tokens were found \"\"\"", "This assures that whenever this file is touched, the cache", "Let's just remove this token. self._delete_token(self.last_returned_uuid) return None refresh_result =", "user cancelled the request self._retrieved_code = None self.debug('User cancelled') return", "= None else: token = self.get_token(scopes, new_token=new_token) if not token:", "self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() self._retrieved_code =", "The URL of the idp that issued the token #", "raise ValueError('Scopes must be a list') token = self._get_token_with_scopes(scopes) if", "requesting token') if self.problem_reported: # We were reported an issue", "Make cache_lock a filesystem lock so we also lock across", "in resp: self.debug('Unable to refresh, error: %s', resp['error']) return False", "utf-8 -*- # # Copyright (C) 2016, 2017 Red Hat,", "we will return it. This function will return a bearer", "UUID of the token to be removed from cache \"\"\"", "= 'code' rquery['client_id'] = self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode'] =", "self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report an error with the last token", "List of scopes that need to be in the returned", "self._cache_lock.locked() self.debug('Writing cache with %i tokens', len(self._cache)) with open(self._cachefile, 'w')", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "self._cache[uuid]['expires_at']) return True def _get_server(self, app): \"\"\"This function returns a", "resp = requests.request(method, *args, **ckwargs) if resp.status_code == 401 and", "uuid: token pairs # Every \"token\" object is a json", "still be valid, it may have been revoked by #", "It is always safe to call this though, since if", "# multiple invocations self._cache_lock = Lock() with self._cache_lock: self.__refresh_cache() self._valid_cache", "ValueError('Cannot use POST tokens in %s method' % method) if", "port did not work. Switch to next one continue def", "cachedir: The directory in which to store the token caches.", "if the token was succesfully refreshed, False otherwise \"\"\" oldtoken", "self._cache_lock.locked() self.debug('Refreshing cache') if not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if", "Useragent string to use. 
If not provided, defaults to \"python-openidc-client/VERSION\"", "in resp: self.debug('Error exchanging authorization code: %s', resp['error']) return None", "(the \"Software\"), to deal # in the Software without restriction,", "We will start a new webserver on one of the", "last returned. If that worked, we will return the new", "'token_type': resp['token_type'], 'scopes': scopes} # AND WE ARE DONE! \\o/", "%s', uuid) token = self._cache[uuid] if token['idp'] != self.idp: self.debug('Incorrect", "401 and not is_retry: if not auto_refresh: return resp self.token_to_try", "worked, we will return the new access token. If it", "the token you sent was invalid, you should call it.", "but if no such tokens exist it will return the", "in self.idp_mapping: return self.idp + self.idp_mapping[method] else: return ValueError('Idp Mapping", "use. If not provided, defaults to \"python-openidc-client/VERSION\" :kwarg cachedir: The", "permit persons to whom the Software is # furnished to", "from __future__ import print_function from copy import copy import json", "in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else: self.debug('Token was", "%s' % method) def _refresh_token(self, uuid): \"\"\"Tries to refresh a", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "an available WEB_PORT.\"\"\" for port in WEB_PORTS: try: server =", "a valid token, use that self.last_returned_uuid = token[0] self.problem_reported =", "above copyright notice and this permission notice shall be included", "not be refreshed or saving the cache if renewal was", "possible_token def _idp_url(self, method): \"\"\"Returns the IdP URL for the", "by caller.\"\"\" assert self._cache_lock.locked() self.debug('Refreshing cache') if not os.path.isdir(self.cachedir): self.debug('Creating", "token[0] self.problem_reported = False return token[1]['access_token'] elif not new_token: return", "that whenever this file is touched, the cache lock is", "'expires_at': time.time() + resp['expires_in']}) self.debug('Refreshed until %s', self._cache[uuid]['expires_at']) return True", "In that case, you will want to call report_token_issue() to", "token if it could not be refreshed or saving the", "without limitation the rights # to use, copy, modify, merge,", "idp: The URL of the idp that issued the token", "from the cache. If you get an indication from your", "to catch the return with either an Authorization Code (that", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "else: if 'headers' not in ckwargs: ckwargs['headers'] = {} ckwargs['headers']['Authorization']", "a 401 and this is a retry. Report error self.report_token_issue()", "pairs # Every \"token\" object is a json dict with", "self.token_to_try = None else: token = self.get_token(scopes, new_token=new_token) if not", "'code' rquery['client_id'] = self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode'] = 'query'", "an Authorization Code (that we will exchange for an access", "call this function if the token was valid but your", "that goes with the client_id. May be None if your", "= ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh', True) method = ckwargs.pop('http_method',", "StringIO import socket import os try: from urllib import urlencode", "cache for any tokens that have the requested scopes. 
It", "of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] < time.time(): #", "'POST', self._idp_url('Token'), data=data) resp.raise_for_status() resp = resp.json() if 'error' in", "else: return ValueError('Idp Mapping did not include path for %s'", "we do not already have on. :rtype: string or None", "token that was last returned. If that worked, we will", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "requests import sys from openidc_client import release # The ports", "useragent=None, cachedir=None, printfd=sys.stdout): \"\"\"Client for interacting with web services relying", "to next one continue def _get_new_token(self, scopes): \"\"\"This function kicks", "the WEB_PORTS, and then either show the user a URL,", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "may not be held by anyone. :param token: Dict of", "self.debug('Trying to get token with scopes %s', scopes) for uuid", "publish, distribute, sublicense, and/or sell # copies of the Software,", "should still be valid, it may have been revoked by", "use POST submission of client secrets rather than Authorization header", "to the following conditions: # # The above copyright notice", "self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs): \"\"\"Make an python-requests POST request.", "that we had requested with the token def __init__(self, app_identifier,", "tokens were found \"\"\" possible_token = None self.debug('Trying to get", "# Internal implementation of tokens: # Every app id has", "self.debug('Incorrect idp') continue if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope: %s not", "to cache on disk. Requires cache_lock to be held by", "it to the valid cache, and then return the UUID.", "our webserver WEB_PORTS = [12345, 23456] class OpenIDCClient(object): # Internal", "self.debug('Possible') possible_token = (uuid, token) if possible_token: self.debug('Returning possible token')", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or", "%s', scopes) for uuid in self._cache: self.debug('Checking %s', uuid) token", "uuid): \"\"\"Removes a token from the cache and writes cache", "request failed due to a server error or because the", "from openidc_client import release # The ports that we will", "= self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() self._retrieved_code", "(or we got another error), we will return None. \"\"\"", "visit %s to grant authorization' % authz_url, file=self._printfd) webbrowser.open(authz_url) server.handle_request()", "{} ckwargs['data']['access_token'] = token else: if 'headers' not in ckwargs:", "self._idp_url('Token'), data=data) resp.raise_for_status() resp = resp.json() if 'error' in resp:", "need to be updated \"\"\" self.debug('Updating token %s in cache,", "try: from StringIO import StringIO except ImportError: from io import", "\"Software\"), to deal # in the Software without restriction, including", ":param uuid: The UUID of the cached token to attempt", "if 'error' in kv: self.debug('Error code returned: %s (%s)', kv['error'],", "self.debug('Missing scope: %s not subset of %s', set(scopes), set(token['scopes'])) continue", "requests.request(method, *args, **ckwargs) if resp.status_code == 401 and not is_retry:", "expires_at is still before the current time, but if no", "the cache if renewal was succesful. 
:param uuid: The UUID", "to refresh, error: %s', resp['error']) return False self._update_token( uuid, {'access_token':", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "'error' in resp: self.debug('Unable to refresh, error: %s', resp['error']) return", "cache_lock to be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing cache", "token def __init__(self, app_identifier, id_provider, id_provider_mapping, client_id, client_secret=None, use_post=False, useragent=None,", "indication from your application that the token you sent was", "self.debug('Checking %s', uuid) token = self._cache[uuid] if token['idp'] != self.idp:", "return None # We did not have a valid token,", "any others we have self.debug('Not yet expired, returning') return uuid,", "Authorization endpoint of the IdP with a request for our", "else: self.problem_reported = True return self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs):", "\"\"\"Removes a token from the cache and writes cache to", "return the new access token. If it did not work,", "import StringIO import socket import os try: from urllib import", "did not work, we will return None and remove this", "granted, free of charge, to any person obtaining a copy", "self._get_token_with_scopes(scopes) if token: # If we had a valid token,", "self.send_request(*args, **kwargs) elif resp.status_code == 401: # We got a", "= '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s to grant", "returned: %s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code", "uuid) token = self._cache[uuid] if token['idp'] != self.idp: self.debug('Incorrect idp')", "attempt to refresh. :rtype: bool :returns: True if the token", "do not already have on. :kwarg auto_refresh: If False, will", ":kwarg scopes: A list of scopes required for the current", "we got another error), we will return None. \"\"\" def", "\"\"\"Refreshes the self._cache from the cache on disk. cache_lock may", "**kwargs) elif resp.status_code == 401: # We got a 401", "that has the required scopes, we will return it. This", "self.idp_mapping[method] else: return ValueError('Idp Mapping did not include path for", "is a token that's supposed to still be valid, prefer", "one of the WEB_PORTS, and then either show the user", "the valid cache, and then return the UUID. 
If the", "idp') continue if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope: %s not subset", "return to the CLI'.encode('ascii')] self._retrieved_code = None server = self._get_server(_token_app)", "uuid = uuidgen().hex self.debug('Adding token %s to cache', uuid) with", "we already have a token with the current app_identifier that", "webserver WEB_PORTS = [12345, 23456] class OpenIDCClient(object): # Internal implementation", "_token_app(environ, start_response): query = environ['QUERY_STRING'] split = query.split('&') kv =", "raise ValueError('Cannot provide json in a post call') if method", "list') token = self._get_token_with_scopes(scopes) if token: # If we had", "uuid = self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid self.problem_reported =", "import simple_server import requests import sys from openidc_client import release", "the refreshed token in self._cache The caller is responsible for", "= True return self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs): \"\"\"Make an", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "if possible or None \"\"\" if not isinstance(scopes, list): raise", "be updated \"\"\" self.debug('Updating token %s in cache, fields %s',", "return True def _get_server(self, app): \"\"\"This function returns a SimpleServer", "want to call report_token_issue() to try to renew the token", "incorrect cases. :kwargs http_method: The HTTP method to use, defaults", "request. Allarguments and keyword arguments are like the arguments to", "\"python-openidc-client/VERSION\" :kwarg cachedir: The directory in which to store the", "IdP mapping dict. :rtype: string :returns: The IdP URL \"\"\"", "\"\"\"Adds a token to the cache and writes cache to", "possible_token = (uuid, token) if possible_token: self.debug('Returning possible token') return", "os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def __refresh_cache(self): \"\"\"Refreshes the self._cache from", "urlencode(rquery) authz_url = '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s", "ckwargs['headers'] = {} ckwargs['headers']['Authorization'] = 'Bearer %s' % token resp", "automatically report token issues on 401. This helps with broken", "'oidc_%s.json' % self.app_id) def __refresh_cache(self): \"\"\"Refreshes the self._cache from the", "ValueError('Idp Mapping did not include path for %s' % method)", "resp.raise_for_status() resp = resp.json() if 'error' in resp: self.debug('Unable to", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "+ int(resp['expires_in']), 'idp': self.idp, 'token_type': resp['token_type'], 'scopes': scopes} # AND", "uuid not in self._cache: return None self._cache[uuid].update(toupdate) self.__write_cache() return uuid", "Allarguments and keyword arguments are like the arguments to requests,", "True def _get_server(self, app): \"\"\"This function returns a SimpleServer with", "if uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else:", "rights # to use, copy, modify, merge, publish, distribute, sublicense,", "already have on. :kwarg auto_refresh: If False, will not try", "# This is a token that's supposed to still be", "self._cache[uuid].update(toupdate) self.__write_cache() return uuid def _delete_token(self, uuid): \"\"\"Removes a token", "scopes, we will return it. This function will return a", "cache to disk. cache_lock may not be held by anyone.", "caches. Will be put through expanduer. 
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016, 2017 Red Hat, Inc.
# Red Hat Author: <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
"""Client for applications relying on OpenID Connect for authentication."""
from __future__ import print_function
from copy import copy
import json
import logging
from threading import Lock
import time
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
import socket
import os
try:
    from urllib import urlencode
except ImportError:
    from urllib.parse import urlencode
from uuid import uuid4 as uuidgen
import webbrowser
from wsgiref import simple_server

import requests
import sys

from openidc_client import release

# The ports that we will try to use for our webserver
WEB_PORTS = [12345, 23456]


class OpenIDCClient(object):
    # Internal implementation of tokens:
    #  Every app id has its own token cache
    #  The token cache is a json serialized dict
    #  This dict contains uuid: token pairs
    #  Every "token" object is a json dict with the following keys:
    #   idp: The URL of the idp that issued the token
    #   sub: The subject that owns the token
    #   access_token: Token value
    #   token_type: Token type. Currently supported: "Bearer"
    #   expires_at: Token expiration UTC time. NOTE: Even if the expires_at
    #    indicates the token should still be valid, it may have been revoked
    #    by the user! Also, even if it has expired, we might still be able to
    #    refresh the token.
    #   refresh_token: The token we can use to refresh the access token
    #   scopes: A list of scopes that we had requested with the token

    def __init__(self, app_identifier, id_provider, id_provider_mapping,
                 client_id, client_secret=None, use_post=False,
                 useragent=None, cachedir=None, printfd=sys.stdout):
        """Client for interacting with web services relying on OpenID Connect.

        :param app_identifier: Identifier for storage of retrieved tokens
        :param id_provider: URL of the identity provider to get tokens from
        :param id_provider_mapping: Mapping with URLs to use for specific
            endpoints on the IdP.
        :kwarg use_post: Whether to use POST submission of client secrets
            rather than Authorization header
        :kwarg client_id: The Client Identifier used to request credentials
        :kwarg client_secret: The client "secret" that goes with the
            client_id. May be None if your IdP does not require you to use a
            secret.
        :kwarg useragent: Useragent string to use. If not provided, defaults
            to "python-openidc-client/VERSION"
        :kwarg cachedir: The directory in which to store the token caches.
            Will be put through expanduser. Default is ~/.openidc. If this
            does not exist and we are unable to create it, the OSError will
            be thrown.
        :kwarg printfd: The File object to print token instructions to.
        """
        self.logger = logging.getLogger(__name__)
        self.debug = self.logger.debug

        self.app_id = app_identifier
        self.use_post = use_post
        self.idp = id_provider
        self.idp_mapping = id_provider_mapping
        self.client_id = client_id
        self.client_secret = client_secret
        self.useragent = useragent or 'python-openid-client/%s' % \
            release.VERSION
        self.cachedir = os.path.expanduser(cachedir or '~/.openidc')
        self.last_returned_uuid = None
        self.problem_reported = False
        self.token_to_try = None
        self._retrieved_code = None
        # TODO: Make cache_lock a filesystem lock so we also lock across
        # multiple invocations
        self._cache_lock = Lock()
        with self._cache_lock:
            self.__refresh_cache()
        self._valid_cache = []
        self._printfd = printfd

    def get_token(self, scopes, new_token=True):
        """Function to retrieve tokens with specific scopes.

        This function will block until a token is retrieved if requested.
        It is always safe to call this though, since if we already have a
        token with the current app_identifier that has the required scopes,
        we will return it.

        This function will return a bearer token or None.
        Note that the bearer token might have been revoked by the user or
        expired.
        In that case, you will want to call report_token_issue() to try to
        renew the token or delete the token.

        :kwarg scopes: A list of scopes required for the current client.
        :kwarg new_token: If True, we will actively request the user to get
            a new token with the current scopeset if we do not already have
            one.
        :rtype: string or None
        :returns: String bearer token if possible or None
        """
        if not isinstance(scopes, list):
            raise ValueError('Scopes must be a list')
        token = self._get_token_with_scopes(scopes)
        if token:
            # If we had a valid token, use that
            self.last_returned_uuid = token[0]
            self.problem_reported = False
            return token[1]['access_token']
        elif not new_token:
            return None

        # We did not have a valid token, now comes the hard part...
        uuid = self._get_new_token(scopes)
        if uuid:
            self.last_returned_uuid = uuid
            self.problem_reported = False
            return self._cache[uuid]['access_token']

    def report_token_issue(self):
        """Report an error with the last token that was returned.

        This will attempt to renew the token that was last returned.
        If that worked, we will return the new access token.
        If it did not work, we will return None and remove this token from
        the cache.

        If you get an indication from your application that the token you
        sent was invalid, you should call this function.
        You should explicitly NOT call this function if the token was valid
        but your request failed due to a server error or because the account
        or token was lacking specific permissions.
        """
        if not self.last_returned_uuid:
            raise Exception('Cannot report issue before requesting token')
        if self.problem_reported:
            # An issue was already reported before. Let's just remove this
            # token.
            self._delete_token(self.last_returned_uuid)
            return None
        refresh_result = self._refresh_token(self.last_returned_uuid)
        if not refresh_result:
            self._delete_token(self.last_returned_uuid)
            return None
        else:
            self.problem_reported = True
            return self._cache[self.last_returned_uuid]['access_token']

    def send_request(self, *args, **kwargs):
        """Make a python-requests request.

        All arguments and keyword arguments are like the arguments to
        requests, except for the `scopes`, `new_token` and `auto_refresh`
        keyword arguments. `scopes` is required.

        :kwarg scopes: Scopes required for this call. If a token with these
            scopes is not present, a new one will be requested unless
            `new_token` is False.
        :kwarg new_token: If True, we will actively request the user to get
            a new token with the current scopeset if we do not already have
            one.
        :kwarg auto_refresh: If False, will not try to automatically report
            token issues on 401. This helps with broken apps that may send a
            401 return code in incorrect cases.
        :kwarg http_method: The HTTP method to use, defaults to POST.
        """
        ckwargs = copy(kwargs)

        scopes = ckwargs.pop('scopes')
        new_token = ckwargs.pop('new_token', True)
        auto_refresh = ckwargs.pop('auto_refresh', True)
        method = ckwargs.pop('http_method', 'POST')

        is_retry = False
        if self.token_to_try:
            is_retry = True
            token = self.token_to_try
            self.token_to_try = None
        else:
            token = self.get_token(scopes, new_token=new_token)
            if not token:
                return None

        if self.use_post:
            if 'json' in ckwargs:
                raise ValueError('Cannot provide json in a post call')
            if method not in ['POST']:
                raise ValueError('Cannot use POST tokens in %s method'
                                 % method)

            if 'data' not in ckwargs:
                ckwargs['data'] = {}
            ckwargs['data']['access_token'] = token
        else:
            if 'headers' not in ckwargs:
                ckwargs['headers'] = {}
            ckwargs['headers']['Authorization'] = 'Bearer %s' % token

        resp = requests.request(method, *args, **ckwargs)
        if resp.status_code == 401 and not is_retry:
            if not auto_refresh:
                return resp

            self.token_to_try = self.report_token_issue()
            if not self.token_to_try:
                return resp
            return self.send_request(*args, **kwargs)
        elif resp.status_code == 401:
            # We got a 401 and this is a retry. Report error
            self.report_token_issue()
            return resp
        else:
            return resp

    @property
    def _cachefile(self):
        """Property to get the cache file name for the current client.

        This ensures that whenever this file is touched, the cache lock is
        held.
        """
        assert self._cache_lock.locked()
        return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id)

    def __refresh_cache(self):
        """Refreshes the self._cache from the cache on disk.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Refreshing cache')
        if not os.path.isdir(self.cachedir):
            self.debug('Creating directory')
            os.makedirs(self.cachedir)
        if not os.path.exists(self._cachefile):
            self.debug('Creating file')
            with open(self._cachefile, 'w') as f:
                f.write(json.dumps({}))
        with open(self._cachefile, 'r') as f:
            self._cache = json.loads(f.read())
        self.debug('Loaded %i tokens', len(self._cache))

    def _refresh_cache(self):
        """Refreshes the self._cache from the cache on disk.

        cache_lock may not be held by anyone."""
        with self._cache_lock:
            self.__refresh_cache()

    def __write_cache(self):
        """Writes self._cache to cache on disk.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Writing cache with %i tokens', len(self._cache))
        with open(self._cachefile, 'w') as f:
            f.write(json.dumps(self._cache))

    def _add_token(self, token):
        """Adds a token to the cache and writes cache to disk.

        cache_lock may not be held by anyone.

        :param token: Dict of the token to be added to the cache
        """
        uuid = uuidgen().hex
        self.debug('Adding token %s to cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            self._cache[uuid] = token
            self.__write_cache()
        return uuid

    def _update_token(self, uuid, toupdate):
        """Updates a token in the cache.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be updated
        :param toupdate: Dict indicating which fields need to be updated
        """
        self.debug('Updating token %s in cache, fields %s',
                   uuid, toupdate.keys())
        with self._cache_lock:
            self.__refresh_cache()
            if uuid not in self._cache:
                return None
            self._cache[uuid].update(toupdate)
            self.__write_cache()
        return uuid

    def _delete_token(self, uuid):
        """Removes a token from the cache and writes cache to disk.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be removed from cache
        """
        self.debug('Removing token %s from cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            if uuid in self._cache:
                self.debug('Removing token')
                del self._cache[uuid]
                self.__write_cache()
            else:
                self.debug('Token was already gone')

    def _get_token_with_scopes(self, scopes):
        """Searches the cache for any tokens that have the requested scopes.

        It will prefer to return tokens whose expires_at is still after the
        current time, but if no such tokens exist it will return the possibly
        expired token: it might be refreshable.

        :param scopes: List of scopes that need to be in the returned token
        :rtype: (string, dict) or None
        :returns: Token UUID and contents or None if no applicable tokens
            were found
        """
        possible_token = None
        self.debug('Trying to get token with scopes %s', scopes)
        for uuid in self._cache:
            self.debug('Checking %s', uuid)
            token = self._cache[uuid]
            if token['idp'] != self.idp:
                self.debug('Incorrect idp')
                continue
            if not set(scopes).issubset(set(token['scopes'])):
                self.debug('Missing scope: %s not subset of %s',
                           set(scopes), set(token['scopes']))
                continue
            if token['expires_at'] > time.time():
                # This is a token that's supposed to still be valid, prefer
                # it over any others we have
                self.debug('Not yet expired, returning')
                return uuid, token
            # This is a token that may or may not still be valid
            self.debug('Possible')
            possible_token = (uuid, token)
        if possible_token:
            self.debug('Returning possible token')
            return possible_token

    def _idp_url(self, method):
        """Returns the IdP URL for the requested method.

        :param method: The method name in the IdP mapping dict.
        :rtype: string
        :returns: The IdP URL
        """
        if method in self.idp_mapping:
            return self.idp + self.idp_mapping[method]
        else:
            raise ValueError('Idp Mapping did not include path for %s'
                             % method)

    def _refresh_token(self, uuid):
        """Tries to refresh a token and put the refreshed token in
        self._cache.

        The caller is responsible for either removing the token if it could
        not be refreshed or saving the cache if renewal was successful.

        :param uuid: The UUID of the cached token to attempt to refresh.
        :rtype: bool
        :returns: True if the token was successfully refreshed, False
            otherwise
        """
        oldtoken = self._cache[uuid]
        self.debug('Refreshing token %s', uuid)
        data = {'client_id': self.client_id,
                'grant_type': 'refresh_token',
                'refresh_token': oldtoken['refresh_token']}
        if self.client_secret:
            data['client_secret'] = self.client_secret
        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        resp = resp.json()
        if 'error' in resp:
            self.debug('Unable to refresh, error: %s', resp['error'])
            return False
        self._update_token(
            uuid,
            {'access_token': resp['access_token'],
             'token_type': resp['token_type'],
             'refresh_token': resp['refresh_token'],
             'expires_at': time.time() + resp['expires_in']})
        self.debug('Refreshed until %s', self._cache[uuid]['expires_at'])
        return True

    def _get_server(self, app):
        """This function returns a SimpleServer with an available
        WEB_PORT."""
        for port in WEB_PORTS:
            try:
                server = simple_server.make_server('0.0.0.0', port, app)
                return server
            except socket.error:
                # This port did not work. Switch to next one
                continue

    def _get_new_token(self, scopes):
        """This function kicks off some magic.

        We will start a new webserver on one of the WEB_PORTS, and then
        either show the user a URL, or if possible, kick off their browser.
        This URL will be the Authorization endpoint of the IdP with a request
        for our client_id to get a new token with the specified scopes.
        The webserver will then need to catch the return with either an
        Authorization Code (that we will exchange for an access token) or
        the cancellation message.

        This function will store the new token in the local cache, add it to
        the valid cache, and then return the UUID.
        If the user cancelled (or we got another error), we will return
        None.
        """
        def _token_app(environ, start_response):
            query = environ['QUERY_STRING']
            split = query.split('&')
            kv = dict([v.split('=', 1) for v in split])

            if 'error' in kv:
                self.debug('Error code returned: %s (%s)',
                           kv['error'], kv.get('error_description'))
                self._retrieved_code = False
            else:
                self._retrieved_code = kv['code']

            # Just return a message
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return [u'You can close this window and return to the '
                    u'CLI'.encode('ascii')]

        self._retrieved_code = None
        server = self._get_server(_token_app)
        if not server:
            raise Exception('We were unable to instantiate a webserver')
        return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1]

        rquery = {}
        rquery['scope'] = ' '.join(scopes)
        rquery['response_type'] = 'code'
        rquery['client_id'] = self.client_id
        rquery['redirect_uri'] = return_uri
        rquery['response_mode'] = 'query'
        query = urlencode(rquery)
        authz_url = '%s?%s' % (self._idp_url('Authorization'), query)
        print('Please visit %s to grant authorization' % authz_url,
              file=self._printfd)
        webbrowser.open(authz_url)
        server.handle_request()
        server.server_close()

        assert self._retrieved_code is not None
        if self._retrieved_code is False:
            # The user cancelled the request
            self._retrieved_code = None
            self.debug('User cancelled')
            return None

        self.debug('We got an authorization code!')
        data = {'client_id': self.client_id,
                'grant_type': 'authorization_code',
                'redirect_uri': return_uri,
                'code': self._retrieved_code}
        if self.client_secret:
            data['client_secret'] = self.client_secret
        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        self._retrieved_code = None
        resp = resp.json()
        if 'error' in resp:
            self.debug('Error exchanging authorization code: %s',
                       resp['error'])
            return None

        token = {'access_token': resp['access_token'],
                 'refresh_token': resp['refresh_token'],
                 'expires_at': time.time() + int(resp['expires_in']),
                 'idp': self.idp,
                 'token_type': resp['token_type'],
                 'scopes': scopes}
        # AND WE ARE DONE!
        return self._add_token(token)
:rtype: string", "self.client_secret = client_secret self.useragent = useragent or 'python-openid-client/%s' % \\", "uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else: self.debug('Token", "a webserver') return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1] rquery = {}", "= requests.request(method, *args, **ckwargs) if resp.status_code == 401 and not", "IdP URL \"\"\" if method in self.idp_mapping: return self.idp +", "on. :kwarg auto_refresh: If False, will not try to automatically", "uuid import uuid4 as uuidgen import webbrowser from wsgiref import", "be in the returned token :rtype: (string, dict) or None", "renew the token or delete the token. :kwarg scopes: A", "prefer it # over any others we have self.debug('Not yet", "= False return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report an error with", "refresh, error: %s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'],", "been revoked by the user or expired. In that case,", "Exception('Cannot report issue before requesting token') if self.problem_reported: # We", "anyone. :param token: Dict of the token to be added", "if not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating", "Dict of the token to be added to the cache", "[('Content-Type', 'text/plain')]) return [u'You can close this window and return", "(string, dict) or None :returns: Token UUID and contents or", "self.token_to_try: is_retry = True token = self.token_to_try self.token_to_try = None", "python-requests POST request. Allarguments and keyword arguments are like the", "of the IdP with a request for our client_id to", "_update_token(self, uuid, toupdate): \"\"\"Updates a token in the cache. cache_lock", "scopes = ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh',", "\\ release.VERSION self.cachedir = os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None", "request the user to get a new token with the", "may not be held by anyone. :param token: UUID of", "def _idp_url(self, method): \"\"\"Returns the IdP URL for the requested", "that may send a 401 return code in incorrect cases.", "name in the IdP mapping dict. 
:rtype: string :returns: The", "\"\"\" self.debug('Updating token %s in cache, fields %s', uuid, toupdate.keys())", "%s method' % method) if 'data' not in ckwargs: ckwargs['data']", "uuidgen().hex self.debug('Adding token %s to cache', uuid) with self._cache_lock: self.__refresh_cache()", ":param toupdate: Dict indicating which fields need to be updated", "= self._get_server(_token_app) if not server: raise Exception('We were unable to", "return uuid def _update_token(self, uuid, toupdate): \"\"\"Updates a token in", "token to be updated :param toupdate: Dict indicating which fields", "actively request the user to get a new token with", "token in self._cache The caller is responsible for either removing", "if self._retrieved_code is False: # The user cancelled the request", "cachedir=None, printfd=sys.stdout): \"\"\"Client for interacting with web services relying on", "a token that's supposed to still be valid, prefer it", "self._cache_lock: self.__refresh_cache() self._cache[uuid] = token self.__write_cache() return uuid def _update_token(self,", "= simple_server.make_server('0.0.0.0', port, app) return server except socket.error: # This", "not os.path.exists(self._cachefile): self.debug('Creating file') with open(self._cachefile, 'w') as f: f.write(json.dumps({}))", "file') with open(self._cachefile, 'w') as f: f.write(json.dumps({})) with open(self._cachefile, 'r')", "# We did not have a valid token, now comes", "None :returns: String bearer token if possible or None \"\"\"", "data = {'client_id': self.client_id, 'grant_type': 'authorization_code', 'redirect_uri': return_uri, 'code': self._retrieved_code}", "token was succesfully refreshed, False otherwise \"\"\" oldtoken = self._cache[uuid]", "self.debug('Error exchanging authorization code: %s', resp['error']) return None token =", "POST tokens in %s method' % method) if 'data' not", "not subset of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] <", "token. self._delete_token(self.last_returned_uuid) return None refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result:", "Red Hat Author: <NAME> <<EMAIL>> # # Permission is hereby", "before. Let's just remove this token. self._delete_token(self.last_returned_uuid) return None refresh_result", "cache and writes cache to disk. cache_lock may not be", "401 and this is a retry. Report error self.report_token_issue() return", "_refresh_cache(self): \"\"\"Refreshes the self._cache from the cache on disk. cache_lock", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "or if possible, kick off their browser. This URL will", "call report_token_issue() to try to renew the token or delete", "through expanduer. Default is ~/.openidc. If this does not exist", "= False if self.token_to_try: is_retry = True token = self.token_to_try", "with self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes self._cache to cache on", "exist it will return the possibly expired token: it might", "server.socket.getsockname()[1] rquery = {} rquery['scope'] = ' '.join(scopes) rquery['response_type'] =", "the token that was last returned. If that worked, we", "the Software is # furnished to do so, subject to", "Default is ~/.openidc. If this does not exist and we", "= None # TODO: Make cache_lock a filesystem lock so", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "function kicks off some magic. 
We will start a new", "limitation the rights # to use, copy, modify, merge, publish,", "in self._cache The caller is responsible for either removing the", "= self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode'] = 'query' query =", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "token instructions to. \"\"\" self.logger = logging.getLogger(__name__) self.debug = self.logger.debug", "expanduer. Default is ~/.openidc. If this does not exist and", "or None :returns: String bearer token if possible or None", "is not present with this token, a new one will", "None # We did not have a valid token, now", "disk. cache_lock may not be held by anyone.\"\"\" with self._cache_lock:", "refresh a token and put the refreshed token in self._cache", "import uuid4 as uuidgen import webbrowser from wsgiref import simple_server", "self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None else: self.problem_reported =", "documentation files (the \"Software\"), to deal # in the Software", "copies or substantial portions of the Software. # # THE", "is ~/.openidc. If this does not exist and we are", "is held \"\"\" assert self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id)", "to call this though, since if we already have a", "% method) def _refresh_token(self, uuid): \"\"\"Tries to refresh a token", "token with the current scopeset if we do not already", "= ckwargs.pop('http_method', 'POST') is_retry = False if self.token_to_try: is_retry =", "time.time() + int(resp['expires_in']), 'idp': self.idp, 'token_type': resp['token_type'], 'scopes': scopes} #", "to be added to the cache \"\"\" uuid = uuidgen().hex", "rquery['client_id'] = self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode'] = 'query' query", "if self.problem_reported: # We were reported an issue before. Let's", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "client \"secret\" that goes with the client_id. May be None", "raise Exception('Cannot report issue before requesting token') if self.problem_reported: #", "return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def __refresh_cache(self): \"\"\"Refreshes the self._cache", "for uuid in self._cache: self.debug('Checking %s', uuid) token = self._cache[uuid]", "specific endpoints on the IdP. :kwarg use_post: Whether to use", "credentials :kwarg client_secret: The client \"secret\" that goes with the", "resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + resp['expires_in']}) self.debug('Refreshed", "{} ckwargs['headers']['Authorization'] = 'Bearer %s' % token resp = requests.request(method,", "expired. In that case, you will want to call report_token_issue()", "port, app) return server except socket.error: # This port did", "will return None and remove this token from the cache.", "# scopes: A list of scopes that we had requested", "to # refresh the token. # refresh_token: The token we", "otherwise \"\"\" oldtoken = self._cache[uuid] self.debug('Refreshing token %s', uuid) data", "tokens :param id_provider: URL of the identity provider to get", "of scopes that need to be in the returned token", "new token with the specified scopes. The webserver will then", "Will be put through expanduer. Default is ~/.openidc. 
If this", "even if it has expired, we might still be able", "'refresh_token': resp['refresh_token'], 'expires_at': time.time() + resp['expires_in']}) self.debug('Refreshed until %s', self._cache[uuid]['expires_at'])", "get a new token with the specified scopes. The webserver", "the token to be removed from cache \"\"\" self.debug('Removing token", "The File object to print token instructions to. \"\"\" self.logger", "exist and we are unable to create it, the OSError", "by anyone.\"\"\" with self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes self._cache to", "def _add_token(self, token): \"\"\"Adds a token to the cache and", "# This dict contains uuid: token pairs # Every \"token\"", "valid token, use that self.last_returned_uuid = token[0] self.problem_reported = False", "object is a json dict with the following keys: #", "off their browser. This URL will be the Authorization endpoint", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "for our client_id to get a new token with the", "= dict([v.split('=', 1) for v in split]) if 'error' in", ":rtype: string or None :returns: String bearer token if possible", "urlencode from uuid import uuid4 as uuidgen import webbrowser from", "Software, and to permit persons to whom the Software is", "have the requested scopes. It will prefer to return tokens", "server except socket.error: # This port did not work. Switch", "token = self._get_token_with_scopes(scopes) if token: # If we had a", "access token) or the cancellation message. This function will store", "call. If a token is not present with this token,", "resp return self.send_request(*args, **kwargs) elif resp.status_code == 401: # We", "Requires cache_lock to be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing", "self._cache_lock: self.__refresh_cache() if uuid not in self._cache: return None self._cache[uuid].update(toupdate)", "a filesystem lock so we also lock across # multiple", "None and remove this token from the cache. If you", "auto_refresh: return resp self.token_to_try = self.report_token_issue() if not self.token_to_try: return", "in the IdP mapping dict. :rtype: string :returns: The IdP", "if not isinstance(scopes, list): raise ValueError('Scopes must be a list')", "to the valid cache, and then return the UUID. If", "If a token is not present with this token, a", "= {'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + int(resp['expires_in']), 'idp':", "from cache', uuid) with self._cache_lock: self.__refresh_cache() if uuid in self._cache:", "This port did not work. Switch to next one continue", "use a secret. :kwarg useragent: Useragent string to use. If", "to try to renew the token or delete the token.", "the cache lock is held \"\"\" assert self._cache_lock.locked() return os.path.join(self.cachedir,", "to the cache and writes cache to disk. 
cache_lock may", "with open(self._cachefile, 'w') as f: f.write(json.dumps({})) with open(self._cachefile, 'r') as", ":returns: Token UUID and contents or None if no applicable", "obtaining a copy # of this software and associated documentation", "401: # We got a 401 and this is a", "is # furnished to do so, subject to the following", "If you get an indication from your application that the", "to whom the Software is # furnished to do so,", "os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file') with", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE.", "safe to call this though, since if we already have", "try to automatically report token issues on 401. This helps", "with self._cache_lock: self.__refresh_cache() self._cache[uuid] = token self.__write_cache() return uuid def", "\"\"\"Refreshes the self._cache from the cache on disk. Requires cache_lock", "The above copyright notice and this permission notice shall be", "assures that whenever this file is touched, the cache lock", "others we have self.debug('Not yet expired, returning') return uuid, token", "returning') return uuid, token # This is a token that", "# This port did not work. Switch to next one", "= uuid self.problem_reported = False return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report", "required for the current client. :kwarg new_token: If True, we", "should explicitly NOT call this function if the token was", "the cache on disk. cache_lock may not be held by", "server.handle_request() server.server_close() assert self._retrieved_code is not None if self._retrieved_code is", "if token['idp'] != self.idp: self.debug('Incorrect idp') continue if not set(scopes).issubset(set(token['scopes'])):", "StringIO import StringIO except ImportError: from io import StringIO import", "with broken apps that may send a 401 return code", "of the token to be added to the cache \"\"\"", "will be thrown. :kwargs printfd: The File object to print", "not new_token: return None # We did not have a", "attempt to renew the token that was last returned. If", "_get_new_token(self, scopes): \"\"\"This function kicks off some magic. We will", "return resp @property def _cachefile(self): \"\"\"Property to get the cache", "__future__ import print_function from copy import copy import json import", "= [] self._printfd = printfd def get_token(self, scopes, new_token=True): \"\"\"Function", "return None token = {'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time()", "with the following keys: # idp: The URL of the", "get token with scopes %s', scopes) for uuid in self._cache:" ]
[ "False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of", "notice shall be included in all # copies of this", "dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None:", "<<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013", "for v in enum.values) for range_ in ranges: attr, elms", "elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False))", "if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr))", "from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, ) #-------------------------------------------------------------------------------", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "if outdef.uom is not None: attrib['uom'] = outdef.uom if outdef.crs", "WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common part of", "rights # to use, copy, modify, merge, publish, distribute, sublicense,", "elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else: raise", "return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if", "if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding", "f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\",", "\"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for crs in", "elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\"", "*[OWS(\"UOM\", u) for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values))", "# WPS 1.0 parameters' XML encoders # # Project: EOxServer", "})) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u)", "import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference,", "part of the execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm))", "XML encoders # # Project: EOxServer <http://eoxserver.org> # Authors: <NAME>", "elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData):", "the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def", "EOX IT Services GmbH # # Permission is hereby granted,", "definition.\"\"\" attrib = {} if outdef.uom is not None: attrib['uom']", "OR OTHER DEALINGS IN # THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters", "is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not", "# # The above copyright notice and this permission notice", "IN # THE SOFTWARE. 
#------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData,", "of the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False))", "is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in", "def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all XML", "value object! OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if", "elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] =", "( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, )", "not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None:", "if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is", "THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #-------------------------------------------------------------------------------", "and associated documentation files (the \"Software\"), to deal # in", "Software without restriction, including without limitation the rights # to", "and to permit persons to whom the Software is #", "AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges =", "isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges =", "return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\",", "copies of the Software, and to permit persons to whom", "elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in", "hereby granted, free of charge, to any person obtaining a", "this permission notice shall be included in all # copies", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "distribute, sublicense, and/or sell # copies of the Software, and", "from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange,", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\",", "}) elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange):", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode", "if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f", "\"\"\" Encode the execure response output definition.\"\"\" attrib = {}", "= {} if outdef.uom is not None: attrib['uom'] = outdef.uom", "enum = avobj elif isinstance(avobj, AllowedRange): ranges = [avobj] elif", "output definition.\"\"\" attrib = {} if outdef.uom is not None:", "be included in all # copies of this Software or", "in ranges: attr, elms = {}, [] if range_.closure !=", "elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData):", "outdef.schema is not None: attrib['schema'] = outdef.schema if outdef.as_reference is", 
"frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not", "if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "deal # in the Software without restriction, including without limitation", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "None: attrib['schema'] = outdef.schema if outdef.as_reference is not None: attrib['asReference']", "NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData):", "if outdef.schema is not None: attrib['schema'] = outdef.schema if outdef.as_reference", "execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm):", "ranges: attr, elms = {}, [] if range_.closure != 'closed':", "is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for", "enum.values) for range_ in ranges: attr, elms = {}, []", "[], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference):", "encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\" attrib =", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if enum is not", "\"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm,", "is not None: attrib['schema'] = outdef.schema if outdef.as_reference is not", "not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for range_", ") def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding", "_encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all XML parameters.\"\"\"", "attrib['encoding'] = outdef.encoding if outdef.schema is not None: attrib['schema'] =", "software and associated documentation files (the \"Software\"), to deal #", "not None: attrib['encoding'] = outdef.encoding if outdef.schema is not None:", "LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm,", "prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #-------------------------------------------------------------------------------", "elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url,", "Encode the execure response output definition.\"\"\" attrib = {} if", "= \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData):", "not None: attrib['asReference'] = 'true' if outdef.as_reference else 'false' return", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if", "NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for", "AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR", "\"\"\" Encode common sub-elements of all XML parameters.\"\"\" elist =", "the Software without restriction, including without limitation the rights #", "\"\"\" Encode common part of the execure response data output.\"\"\"", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #-------------------------------------------------------------------------------", "if outdef.crs is not None: attrib['crs'] = outdef.crs if outdef.mime_type", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "_encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)),", "elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges, elist =", "<reponame>constantinius/eoxserver_combined<filename>eoxserver/services/ows/wps/v10/encoders/parameters.py #------------------------------------------------------------------------------- # # WPS 1.0 parameters' XML encoders #", "elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None:", "# # Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> #", "elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype =", "TypeError(\"Invalid allowed value object! OBJ=%r\"%avobj) dtype = avobj.dtype ddtype =", "1.0 parameters' XML encoders # # Project: EOxServer <http://eoxserver.org> #", "frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if", "# of this software and associated documentation files (the \"Software\"),", "furnished to do so, subject to the following conditions: #", "to do so, subject to the following conditions: # #", "# The above copyright notice and this permission notice shall", "#------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem = NIL(\"LiteralData\"", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "avobj.ranges else: raise TypeError(\"Invalid allowed value object! 
OBJ=%r\"%avobj) dtype =", "a copy # of this software and associated documentation files", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "ranges, elist = None, [], [] if isinstance(avobj, AllowedAny): return", "AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed", "= (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm,", "AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import (", "Encode common sub-elements of all XML parameters.\"\"\" elist = [OWS(\"Identifier\",", "None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None: attrib['encoding']", "range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval", "= outdef.uom if outdef.crs is not None: attrib['crs'] = outdef.crs", "if outdef.as_reference is not None: attrib['asReference'] = 'true' if outdef.as_reference", "u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "encode_input_exec(prm): \"\"\" Encode common part of the execure response data", "outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is", "elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True):", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for crs in prm.crss]) )", "prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm,", "prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype", "LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from", "following conditions: # # The above copyright notice and this", "BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\" Encode common", "to deal # in the Software without restriction, including without", "NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for crs in prm.crss])", "isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return", "elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input):", "isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges", "conditions: # # The above copyright notice and this permission", "is not None: attrib['asReference'] = 'true' if outdef.as_reference else 'false'", "_encode_format(frmt): elem = NIL(\"Format\", 
NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not", "frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def", "ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum,", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "Copyright (C) 2013 EOX IT Services GmbH # # Permission", "range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "outdef.crs if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if", "outdef.encoding if outdef.schema is not None: attrib['schema'] = outdef.schema if", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem", "**{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum", "return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response", "NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\",", "elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def", "Software or works derived from this Software. # # THE", "True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm):", "{ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if", "\"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) )", "USE OR OTHER DEALINGS IN # THE SOFTWARE. 
#------------------------------------------------------------------------------- from", "outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm,", "elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist", "<NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT", "NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if", "u) for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if", "the execure response output definition.\"\"\" attrib = {} if outdef.uom", "return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode", "elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\",", "if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False))", "data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode", "and/or sell # copies of the Software, and to permit", "ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum =", "the rights # to use, copy, modify, merge, publish, distribute,", "response output definition.\"\"\" attrib = {} if outdef.uom is not", "elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\"", "notice and this permission notice shall be included in all", "is hereby granted, free of charge, to any person obtaining", "execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\"", "v in enum.values) for range_ in ranges: attr, elms =", "elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges", "\"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\",", "encode_output_exec(prm): \"\"\" Encode common part of the execure response data", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms]) ))", "elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif", "\"\"\" Encode process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if", "person obtaining a copy # of this software and associated", "# # Permission is hereby granted, free of charge, to", "without restriction, including without limitation the rights # to use,", "= outdef.encoding if outdef.schema is not None: attrib['schema'] = outdef.schema", "False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm):", "elem def encode_input_exec(prm): \"\"\" Encode common part of the execure", "None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\", 
frmt.schema))", "subject to the following conditions: # # The above copyright", "IT Services GmbH # # Permission is hereby granted, free", "response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode", "'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\"", "\"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"]", "'true' if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib)", "parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\",", "False)) return elem def encode_input_exec(prm): \"\"\" Encode common part of", "is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum,", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def", "eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def", "frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema", "isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif", "# Copyright (C) 2013 EOX IT Services GmbH # #", "False)) def encode_output_exec(prm): \"\"\" Encode common part of the execure", "is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\",", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL", "of all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title", "is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None:", "description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm,", "is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm,", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not", "(\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True))", "not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\",", "*_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm,", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values)", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem =", "\"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif", "_encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) ) def _encode_format(frmt):", "# Permission is hereby granted, free of charge, to any", "of charge, to any person obtaining a copy # of", "enum, ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "merge, publish, distribute, sublicense, and/or sell # copies of the", "OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> #", "_encode_allowed_value(avobj): enum, ranges, elist = None, [], [] if isinstance(avobj,", "NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "outdef.encoding is not None: attrib['encoding'] = outdef.encoding if outdef.schema is", "prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype", "\"\"\" Encode common part of the execure response data input.\"\"\"", "return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm))", "*_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm,", "is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return", "enum, ranges, elist = None, [], [] if isinstance(avobj, AllowedAny):", "elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode process description", "elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def", "parameters' XML encoders # # Project: EOxServer <http://eoxserver.org> # Authors:", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs))", "elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else", "_encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\",", "so, subject to the following conditions: # # The above", "is_input): dtype = prm.dtype elem = NIL(\"LiteralData\" if is_input else", "attrib['asReference'] = 'true' if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef,", "# Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- #", "attrib['uom'] = outdef.uom if outdef.crs is not None: attrib['crs'] =", "*_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\"", "attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None: attrib['encoding'] =", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "elif isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum,", "def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\",", "None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing)))", "if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def", "is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "#------------------------------------------------------------------------------- from 
#-------------------------------------------------------------------------------
#
#  WPS 1.0 parameters' XML encoders
#
# Project: EOxServer <http://eoxserver.org>
# Authors: <NAME> <<EMAIL>>
#          <NAME> <<EMAIL>>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2013 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------

from eoxserver.services.ows.wps.parameters import (
    LiteralData, ComplexData, BoundingBoxData,
    AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection,
    AllowedByReference,
)
from eoxserver.services.ows.wps.v10.util import (
    OWS, WPS, NIL, ns_ows,
)

#-------------------------------------------------------------------------------

def encode_input_descr(prm):
    """ Encode a process description input."""
    elem = NIL("Input", *_encode_param_common(prm))
    elem.attrib["minOccurs"] = ("1", "0")[bool(prm.is_optional)]
    elem.attrib["maxOccurs"] = "1"
    if isinstance(prm, LiteralData):
        elem.append(_encode_literal(prm, True))
    elif isinstance(prm, ComplexData):
        elem.append(_encode_complex(prm, True))
    elif isinstance(prm, BoundingBoxData):
        elem.append(_encode_bbox(prm, True))
    return elem


def encode_output_descr(prm):
    """ Encode a process description output."""
    elem = NIL("Output", *_encode_param_common(prm))
    if isinstance(prm, LiteralData):
        elem.append(_encode_literal(prm, False))
    elif isinstance(prm, ComplexData):
        elem.append(_encode_complex(prm, False))
    elif isinstance(prm, BoundingBoxData):
        elem.append(_encode_bbox(prm, False))
    return elem


def encode_input_exec(prm):
    """ Encode the common part of an execute response data input."""
    return WPS("Input", *_encode_param_common(prm, False))


def encode_output_exec(prm):
    """ Encode the common part of an execute response data output."""
    return WPS("Output", *_encode_param_common(prm))


def encode_output_def(outdef):
    """ Encode an execute response output definition."""
    attrib = {}
    if outdef.uom is not None:
        attrib['uom'] = outdef.uom
    if outdef.crs is not None:
        attrib['crs'] = outdef.crs
    if outdef.mime_type is not None:
        attrib['mimeType'] = outdef.mime_type
    if outdef.encoding is not None:
        attrib['encoding'] = outdef.encoding
    if outdef.schema is not None:
        attrib['schema'] = outdef.schema
    if outdef.as_reference is not None:
        attrib['asReference'] = 'true' if outdef.as_reference else 'false'
    return WPS("Output", *_encode_param_common(outdef, False), **attrib)


def _encode_param_common(prm, title_required=True):
    """ Encode the common sub-elements of all XML parameters."""
    elist = [OWS("Identifier", prm.identifier)]
    if prm.title or title_required:
        elist.append(OWS("Title", prm.title or prm.identifier))
    if prm.abstract:
        elist.append(OWS("Abstract", prm.abstract))
    return elist

#-------------------------------------------------------------------------------

def _encode_literal(prm, is_input):
    dtype = prm.dtype
    elem = NIL("LiteralData" if is_input else "LiteralOutput")
    elem.append(OWS("DataType", dtype.name, **{
        ns_ows("reference"): "http://www.w3.org/TR/xmlschema-2/#%s"%dtype.name,
    }))
    if prm.uoms:
        elem.append(NIL("UOMs",
            NIL("Default", OWS("UOM", prm.uoms[0])),
            NIL("Supported", *[OWS("UOM", u) for u in prm.uoms])
        ))
    if is_input:
        elem.append(_encode_allowed_value(prm.allowed_values))
        if prm.default is not None:
            elem.append(NIL("DefaultValue", str(prm.default)))
    return elem


def _encode_allowed_value(avobj):
    enum, ranges, elist = None, [], []

    if isinstance(avobj, AllowedAny):
        return OWS("AnyValue")
    elif isinstance(avobj, AllowedByReference):
        return WPS("ValuesReference", **{
            ns_ows("reference"): avobj.url,
            "valuesForm": avobj.url,
        })
    elif isinstance(avobj, AllowedEnum):
        enum = avobj
    elif isinstance(avobj, AllowedRange):
        ranges = [avobj]
    elif isinstance(avobj, AllowedRangeCollection):
        enum, ranges = avobj.enum, avobj.ranges
    else:
        raise TypeError("Invalid allowed value object! OBJ=%r"%avobj)

    dtype = avobj.dtype
    ddtype = dtype.get_diff_dtype()

    if enum is not None:
        elist.extend(OWS("Value", dtype.encode(v)) for v in enum.values)

    for range_ in ranges:
        attr, elms = {}, []
        if range_.closure != 'closed':
            attr = {ns_ows("rangeClosure"): range_.closure}
        if range_.minval is not None:
            elms.append(OWS("MinimumValue", dtype.encode(range_.minval)))
        if range_.maxval is not None:
            elms.append(OWS("MaximumValue", dtype.encode(range_.maxval)))
        if range_.spacing is not None:
            elms.append(OWS("Spacing", ddtype.encode(range_.spacing)))
        elist.append(OWS("Range", *elms, **attr))

    return OWS("AllowedValues", *elist)

#-------------------------------------------------------------------------------

def _encode_complex(prm, is_input):
    return NIL("ComplexData" if is_input else "ComplexOutput",
        NIL("Default", _encode_format(prm.default_format)),
        NIL("Supported", *[_encode_format(f) for f in prm.formats.itervalues()])
    )


def _encode_format(frmt):
    elem = NIL("Format", NIL("MimeType", frmt.mime_type))
    if frmt.encoding is not None:
        elem.append(NIL("Encoding", frmt.encoding))
    if frmt.schema is not None:
        elem.append(NIL("Schema", frmt.schema))
    return elem

#-------------------------------------------------------------------------------

def _encode_bbox(prm, is_input):
    return NIL("BoundingBoxData" if is_input else "BoundingBoxOutput",
        NIL("Default", NIL("CRS", prm.encode_crs(prm.default_crs))),
        NIL("Supported", *[NIL("CRS", prm.encode_crs(crs)) for crs in prm.crss])
    )
[ "' bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): '''", "raise ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk +", "to ' + bucket + ' bucket. ERROR\\n%s' %e) raise", "for the archive. This value is required. :param str vp_save_pk:", "return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a vp", "This value is required. ''' if archive_key is None or", "+ vp_save_pk + '.json' # Upload curation data to S3", "Exception as e: print('ERROR: Error downloading ' + key +", "any of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) #", "+ bucket + ' bucket. ERROR\\n%s' %e) raise return archive_object", "archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error", "location of the archive depends on the bucket and the", "file. This value is required. :param obj save_data: The save", "__archive_key(save_data) + '/' + vp_save_pk + '.json' # Upload curation", "from ' + bucket + ' bucket. ERROR\\n%s' %e) raise", "vp_save data's location (S3 bucket and file path). This value", "S3 archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket,", "given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for", "a valid vp_save object. Builds a new vp_save object by", "+ ' bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}):", "'.json' # Upload curation data to S3 archive bucket. s3_client", "to archive. This value is required. ''' if bucket is", "key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading ' +", "a new vp_save object by creating default values for required", "data object as a JSON file to S3. The location", "for required fields and combines any of the given attributes.", "def build(vp_save={}): ''' Builds and returns a valid vp_save object.", "S3. Uploads the save data object as a JSON file", "None or len(vp_save_pk) <= 0: raise ValueError() if not save_data:", "None or '/' not in archive_key: raise ValueError() bucket, key", "src.db.s3_client import Client as S3Client from decimal import Decimal def", "archive depends on the bucket and the primary key of", "valid vp_save object. Builds a new vp_save object by creating", "= S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception", "as S3Client from decimal import Decimal def get_from_archive(archive_key): ''' Download", "+ key + ' from ' + bucket + '", "If the upload fails, an exception is raised. If successful,", "s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception", "datetime import uuid import simplejson as json from src.db.s3_client import", "archive. This value is required. ''' if bucket is None", "VP Save from S3. :param str archive_key: The vp_save data's", "upload fails, an exception is raised. If successful, returns the", "required. :param str vp_save_pk: The vp_save PK to use as", "The location of the archive depends on the bucket and", "if bucket is None or len(bucket) <= 0: raise ValueError()", "' + bucket + ' bucket. ERROR\\n%s' %e) raise return", "raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def __archive_key(save_data): return", "save data. If the upload fails, an exception is raised.", "is required. 
:param str vp_save_pk: The vp_save PK to use", "raise return archive_object def build(vp_save={}): ''' Builds and returns a", "s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e: print('ERROR:", "default values for required fields and combines any of the", "vp_save_pk, save_data): ''' Archives a vp save data to S3.", "the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps", "+ archive_file + ' to ' + bucket + '", "# Set timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created']", "+ ' from ' + bucket + ' bucket. ERROR\\n%s'", "use as the name of the JSON file. This value", "required. ''' if archive_key is None or '/' not in", "' from ' + bucket + ' bucket. ERROR\\n%s' %e)", "%e) raise return archive_object def build(vp_save={}): ''' Builds and returns", "bucket + ' bucket. ERROR\\n%s' %e) raise return archive_object def", "'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a", "the bucket and the primary key of the save data.", "+ bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps =", "value is required. ''' if archive_key is None or '/'", "file to S3. The location of the archive depends on", "bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps)", "S3. :param str archive_key: The vp_save data's location (S3 bucket", "= now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save", "as e: print('ERROR: Error uploading ' + archive_file + '", "+ ' to ' + bucket + ' bucket. ERROR\\n%s'", "vp_save_pk + '.json' # Upload curation data to S3 archive", "file path). This value is required. ''' if archive_key is", "PK to use as the name of the JSON file.", "of the archive depends on the bucket and the primary", "' to ' + bucket + ' bucket. ERROR\\n%s' %e)", "'/' + vp_save_pk + '.json' # Upload curation data to", "to use as the name of the JSON file. This", "creating default values for required fields and combines any of", "by creating default values for required fields and combines any", "archive_object def build(vp_save={}): ''' Builds and returns a valid vp_save", "''' Download a VP Save from S3. :param str archive_key:", "Client as S3Client from decimal import Decimal def get_from_archive(archive_key): '''", "Archives a vp save data to S3. Uploads the save", "from decimal import Decimal def get_from_archive(archive_key): ''' Download a VP", "is required. ''' if archive_key is None or '/' not", "fails, an exception is raised. If successful, returns the archive", "json from src.db.s3_client import Client as S3Client from decimal import", "The vp_save PK to use as the name of the", "build(vp_save={}): ''' Builds and returns a valid vp_save object. Builds", "import Client as S3Client from decimal import Decimal def get_from_archive(archive_key):", "vp_save_pk is None or len(vp_save_pk) <= 0: raise ValueError() if", "ValueError() if not save_data: raise ValueError() archive_file = __archive_key(save_data) +", "0: raise ValueError() if not save_data: raise ValueError() archive_file =", "S3Client from decimal import Decimal def get_from_archive(archive_key): ''' Download a", "of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set", "key of the save data. If the upload fails, an", "The vp_save data's location (S3 bucket and file path). This", "Download a VP Save from S3. 
:param str archive_key: The", "if archive_key is None or '/' not in archive_key: raise", "save data to S3. Uploads the save data object as", "bucket, key = archive_key.split('/', 1) s3_client = S3Client() try: archive_object", "location (S3 bucket and file path). This value is required.", "= str(uuid.uuid4()) # Set timestamps (for new data) now =", "print('ERROR: Error downloading ' + key + ' from '", "save data object as a JSON file to S3. The", "# Upload curation data to S3 archive bucket. s3_client =", "exception is raised. If successful, returns the archive location. :param", "save_data: raise ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk", "import simplejson as json from src.db.s3_client import Client as S3Client", "archive_key: The vp_save data's location (S3 bucket and file path).", "vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a vp save", "e: print('ERROR: Error downloading ' + key + ' from", "value is required. ''' if bucket is None or len(bucket)", "primary key of the save data. If the upload fails,", "ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def", "vp_save_pk: The vp_save PK to use as the name of", "data. If the upload fails, an exception is raised. If", "the name of the JSON file. This value is required.", "the save data object as a JSON file to S3.", "fields and combines any of the given attributes. ''' vp_save['PK']", "save data object to archive. This value is required. '''", "get_from_archive(archive_key): ''' Download a VP Save from S3. :param str", "the primary key of the save data. If the upload", "ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json'", "Exception as e: print('ERROR: Error uploading ' + archive_file +", "bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e: print('ERROR: Error", "This value is required. :param obj save_data: The save data", "new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] =", "JSON file to S3. The location of the archive depends", "as the name of the JSON file. This value is", "str vp_save_pk: The vp_save PK to use as the name", "of the save data. If the upload fails, an exception", "a JSON file to S3. The location of the archive", "%e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def __archive_key(save_data):", "and returns a valid vp_save object. Builds a new vp_save", "1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except", "archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file", "is None or len(vp_save_pk) <= 0: raise ValueError() if not", "required. :param obj save_data: The save data object to archive.", "data object to archive. This value is required. ''' if", "+ ' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file]", "= json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading", "try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e:", "S3 bucket for the archive. This value is required. 
:param", "now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save def", "= 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives", "Set timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] =", "as e: print('ERROR: Error downloading ' + key + '", "''' Builds and returns a valid vp_save object. Builds a", "= S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as", "from src.db.s3_client import Client as S3Client from decimal import Decimal", "import uuid import simplejson as json from src.db.s3_client import Client", ":param str bucket: The name of the S3 bucket for", "print('ERROR: Error uploading ' + archive_file + ' to '", "vp save data to S3. Uploads the save data object", "archive. This value is required. :param str vp_save_pk: The vp_save", "data's location (S3 bucket and file path). This value is", "If successful, returns the archive location. :param str bucket: The", "str bucket: The name of the S3 bucket for the", "raise ValueError() if not save_data: raise ValueError() archive_file = __archive_key(save_data)", "is None or '/' not in archive_key: raise ValueError() bucket,", "def get_from_archive(archive_key): ''' Download a VP Save from S3. :param", "Uploads the save data object as a JSON file to", "import Decimal def get_from_archive(archive_key): ''' Download a VP Save from", "str archive_key: The vp_save data's location (S3 bucket and file", "vp_save PK to use as the name of the JSON", "object to archive. This value is required. ''' if bucket", "to S3. The location of the archive depends on the", "bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket,", "vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): '''", "except Exception as e: print('ERROR: Error uploading ' + archive_file", "successful, returns the archive location. :param str bucket: The name", "''' if bucket is None or len(bucket) <= 0: raise", "to S3. Uploads the save data object as a JSON", "if not save_data: raise ValueError() archive_file = __archive_key(save_data) + '/'", "S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as", "S3. The location of the archive depends on the bucket", "object by creating default values for required fields and combines", "the archive location. :param str bucket: The name of the", "archive(bucket, vp_save_pk, save_data): ''' Archives a vp save data to", "curation data to S3 archive bucket. s3_client = S3Client() try:", "S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e:", "0: raise ValueError() if vp_save_pk is None or len(vp_save_pk) <=", "Builds a new vp_save object by creating default values for", "and file path). This value is required. ''' if archive_key", "+ '.json' # Upload curation data to S3 archive bucket.", "import datetime import uuid import simplejson as json from src.db.s3_client", "vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket,", "name of the S3 bucket for the archive. This value", "and combines any of the given attributes. 
''' vp_save['PK'] =", "archive_key: raise ValueError() bucket, key = archive_key.split('/', 1) s3_client =", "now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data):", "bucket: The name of the S3 bucket for the archive.", "downloading ' + key + ' from ' + bucket", "archive_file ) except Exception as e: print('ERROR: Error uploading '", "bucket and the primary key of the save data. If", "= __archive_key(save_data) + '/' + vp_save_pk + '.json' # Upload", "vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new data) now", "bucket, archive_file ) except Exception as e: print('ERROR: Error uploading", "Upload curation data to S3 archive bucket. s3_client = S3Client()", "on the bucket and the primary key of the save", "data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now", "the archive. This value is required. :param str vp_save_pk: The", "values for required fields and combines any of the given", "and the primary key of the save data. If the", "data to S3. Uploads the save data object as a", "simplejson as json from src.db.s3_client import Client as S3Client from", "data to S3 archive bucket. s3_client = S3Client() try: s3_client.put_object(", "' + archive_file + ' to ' + bucket +", "name of the JSON file. This value is required. :param", "Save from S3. :param str archive_key: The vp_save data's location", "= archive_key.split('/', 1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket,", "now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type']", "archive location. :param str bucket: The name of the S3", "e: print('ERROR: Error uploading ' + archive_file + ' to", "datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save'", "uploading ' + archive_file + ' to ' + bucket", "' + bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps", "bucket is None or len(bucket) <= 0: raise ValueError() if", "of the S3 bucket for the archive. This value is", "' + key + ' from ' + bucket +", "''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new data)", "is required. ''' if bucket is None or len(bucket) <=", "combines any of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4())", "value is required. :param obj save_data: The save data object", "if vp_save_pk is None or len(vp_save_pk) <= 0: raise ValueError()", "bucket for the archive. This value is required. :param str", "archive_key is None or '/' not in archive_key: raise ValueError()", "The save data object to archive. This value is required.", "<= 0: raise ValueError() if not save_data: raise ValueError() archive_file", "' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return", ":param str vp_save_pk: The vp_save PK to use as the", "raise ValueError() bucket, key = archive_key.split('/', 1) s3_client = S3Client()", "new vp_save object by creating default values for required fields", "ValueError() if vp_save_pk is None or len(vp_save_pk) <= 0: raise", "bucket. 
s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file )", "archive_file + ' to ' + bucket + ' bucket.", "json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading '", "uuid import simplejson as json from src.db.s3_client import Client as", "''' Archives a vp save data to S3. Uploads the", "= datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] =", "Decimal def get_from_archive(archive_key): ''' Download a VP Save from S3.", ") except Exception as e: print('ERROR: Error uploading ' +", "This value is required. :param str vp_save_pk: The vp_save PK", "bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): ''' Builds", "len(bucket) <= 0: raise ValueError() if vp_save_pk is None or", "as a JSON file to S3. The location of the", "the upload fails, an exception is raised. If successful, returns", "vp_save object by creating default values for required fields and", "(S3 bucket and file path). This value is required. '''", "an exception is raised. If successful, returns the archive location.", "''' if archive_key is None or '/' not in archive_key:", "The name of the S3 bucket for the archive. This", "the JSON file. This value is required. :param obj save_data:", "+ '/' + vp_save_pk + '.json' # Upload curation data", "archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json' #", "object. Builds a new vp_save object by creating default values", "the save data. If the upload fails, an exception is", ":param obj save_data: The save data object to archive. This", "vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return", "try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR:", ":param str archive_key: The vp_save data's location (S3 bucket and", "(for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified']", "path). This value is required. ''' if archive_key is None", "of the JSON file. This value is required. :param obj", "location. :param str bucket: The name of the S3 bucket", "or len(bucket) <= 0: raise ValueError() if vp_save_pk is None", "except Exception as e: print('ERROR: Error downloading ' + key", "ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): ''' Builds and", "decimal import Decimal def get_from_archive(archive_key): ''' Download a VP Save", "def archive(bucket, vp_save_pk, save_data): ''' Archives a vp save data", "<= 0: raise ValueError() if vp_save_pk is None or len(vp_save_pk)", "len(vp_save_pk) <= 0: raise ValueError() if not save_data: raise ValueError()", "Error downloading ' + key + ' from ' +", "obj save_data: The save data object to archive. This value", "This value is required. ''' if bucket is None or", "is raised. If successful, returns the archive location. :param str", "= now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk,", "return archive_object def build(vp_save={}): ''' Builds and returns a valid", "save_data): ''' Archives a vp save data to S3. Uploads", "s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except", "ValueError() bucket, key = archive_key.split('/', 1) s3_client = S3Client() try:", "returns a valid vp_save object. 
Builds a new vp_save object", "the archive depends on the bucket and the primary key", "vp_save object. Builds a new vp_save object by creating default", "required. ''' if bucket is None or len(bucket) <= 0:", "key + ' from ' + bucket + ' bucket.", "archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def __archive_key(save_data): return save_data['PK']", "Error uploading ' + archive_file + ' to ' +", "attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new", "object as a JSON file to S3. The location of", "not save_data: raise ValueError() archive_file = __archive_key(save_data) + '/' +", "None or len(bucket) <= 0: raise ValueError() if vp_save_pk is", "as json from src.db.s3_client import Client as S3Client from decimal", "depends on the bucket and the primary key of the", "is required. :param obj save_data: The save data object to", "bucket and file path). This value is required. ''' if", "a vp save data to S3. Uploads the save data", "a VP Save from S3. :param str archive_key: The vp_save", "returns the archive location. :param str bucket: The name of", "in archive_key: raise ValueError() bucket, key = archive_key.split('/', 1) s3_client", "or len(vp_save_pk) <= 0: raise ValueError() if not save_data: raise", "raise ValueError() if vp_save_pk is None or len(vp_save_pk) <= 0:", "value is required. :param str vp_save_pk: The vp_save PK to", "from S3. :param str archive_key: The vp_save data's location (S3", "the S3 bucket for the archive. This value is required.", "JSON file. This value is required. :param obj save_data: The", "or '/' not in archive_key: raise ValueError() bucket, key =", "str(uuid.uuid4()) # Set timestamps (for new data) now = datetime.datetime.now().isoformat()", "archive_key.split('/', 1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal)", "timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now", "raised. If successful, returns the archive location. :param str bucket:", "Builds and returns a valid vp_save object. Builds a new", "required fields and combines any of the given attributes. '''", "to S3 archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')),", "is None or len(bucket) <= 0: raise ValueError() if vp_save_pk", "save_data: The save data object to archive. This value is", "'/' not in archive_key: raise ValueError() bucket, key = archive_key.split('/',", "not in archive_key: raise ValueError() bucket, key = archive_key.split('/', 1)", "key = archive_key.split('/', 1) s3_client = S3Client() try: archive_object =" ]
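
# Illustrative round trip (not part of the module above): the bucket name and
# the S3 credentials expected by src.db.s3_client.Client are hypothetical.
if __name__ == '__main__':
    save = build({'notes': 'example attribute'})
    location = archive('my-archive-bucket', save['PK'], save)
    # location is '<bucket>/<PK>/<PK>.json'; feed it back to get_from_archive.
    restored = get_from_archive(location)
    print(restored['PK'] == save['PK'])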
[ "=========== This example presents the basic usage of brokenaxes \"\"\"", "Basic usage =========== This example presents the basic usage of", "brokenaxes import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2))", "import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax", "import numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0,", "= plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7),", "plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79,", "of brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes import", "\"\"\" Basic usage =========== This example presents the basic usage", "matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy as", "the basic usage of brokenaxes \"\"\" import matplotlib.pyplot as plt", "This example presents the basic usage of brokenaxes \"\"\" import", "as plt from brokenaxes import brokenaxes import numpy as np", "basic usage of brokenaxes \"\"\" import matplotlib.pyplot as plt from", "brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes import brokenaxes", "hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x),", "as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4,", "= np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x,", "fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1,", "100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x),", "from brokenaxes import brokenaxes import numpy as np fig =", "np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x), label='cos') bax.legend(loc=3)", "presents the basic usage of brokenaxes \"\"\" import matplotlib.pyplot as", "np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)),", "plt from brokenaxes import brokenaxes import numpy as np fig", "bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x), label='cos')", ".1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x =", "(.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10", "x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin')", "usage of brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes", "numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1),", "import matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy", "np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10", "bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)),", "(.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0,", "1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 *", "= brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05)", "x), label='sin') bax.plot(x, np.cos(10 * x), label='cos') bax.legend(loc=3) bax.set_xlabel('time') bax.set_ylabel('value')", "brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x", "brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax =", "\"\"\" import matplotlib.pyplot as plt from brokenaxes import brokenaxes import", "ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100)", "* x), label='sin') bax.plot(x, np.cos(10 * x), label='cos') bax.legend(loc=3) bax.set_xlabel('time')", ".7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1,", "usage =========== This example presents the basic usage of brokenaxes", "1, 100) 
bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 *", ".7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x,", "example presents the basic usage of brokenaxes \"\"\" import matplotlib.pyplot" ]
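
# Not part of the original gallery example: render the figure interactively, or
# swap in fig.savefig("basic_usage.png", bbox_inches="tight") for a headless run.
plt.show()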
[ "all.add(stmt) elif prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\"))", "> 20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path =", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt =", "label if \"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt", "Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path, format='json-ld') all.serialize(destination=path.replace(\".json\", \".rdf\"), format='pretty-xml')", "ln in ln_map: ln_org = ln ln = ln_map[ln] if", "if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\"", "and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数", "if suffix not in places: places[suffix] = { \"lat\" :", "urllib import ssl import csv import time import requests import", "stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj:", "(subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path =", "from rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace import", "all = Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f)", "URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"),", "prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif", "if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt)", "wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt", "== \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix", "json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f)", "elif prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt)", "value in values: uri = \"chname:\"+value if field == \"spatial\":", "ssl import csv import time import requests import json import", "elif prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt)", "import urllib import ssl import csv import time import requests", "with open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path): with", "obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"]", "os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) # sameAs", "sys import urllib import ssl import csv import time import", "Namespace all = Graph() with open(\"data/dict.json\") as f: ln_map =", "\"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org !=", "###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt)", "= obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if", "in values: uri = \"chname:\"+value if field == 
\"spatial\": uri", "float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org != \"\"", "continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki", "st_path = \"../data/index.json\" with open(st_path) as f: result = json.load(f)", "as f: wiki = json.load(f) # sameAs stmt = (subject,", "elif prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt)", "''' if \"point\" in obj and prefix == \"place\": value", "== \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix", "\"chname:\"+value if field == \"spatial\": uri = \"place:\"+value if uri", "csv from rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt = (subject,", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt", "== \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri =", "\") # addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\":", "labels: value = label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\"", "== \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ######", "in labels: if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label,", "= [\"spatial\", \"agential\"] for field in fields: values = obj[field]", "prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) #", "labels: if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"]))", "glob import yaml import sys import urllib import ssl import", "in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報", "for field in fields: values = obj[field] for value in", "all.add(stmt) # label if \"labels\" in obj and \"ja\" in", "f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f:", "obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if", "obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value =", "all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label", "label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if", "or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt =", "# description if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]:", "labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"]", "# 正規化前 if ln_org != \"\" and ln != ln_org:", "= (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path, format='json-ld')", "\"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if", "= \"\" if ln in ln_map: ln_org = ln ln", "suffix = tmp[1] ln = suffix ln_org = \"\" if", "\"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"),", "addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt = (subject,", "= 
\"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki =", "(subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt", "# addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1])", "all.add(stmt) elif prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\"))", "uri = \"place:\"+value if uri not in uris: uris.append(uri) for", ": float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri)", "\"spatial\": uri = \"place:\"+value if uri not in uris: uris.append(uri)", "\"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels:", "URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in obj and", "result: fields = [\"spatial\", \"agential\"] for field in fields: values", "value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\"", "result = json.load(f) uris = [] for obj in result:", "in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value", "Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"]", "= tmp[1] ln = suffix ln_org = \"\" if ln", "in fields: values = obj[field] for value in values: uri", "for obj in result: fields = [\"spatial\", \"agential\"] for field", "import shutil import os import json import glob import yaml", "continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\":", "yaml import sys import urllib import ssl import csv import", "20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\"", "obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and", "= (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description", "\"\" if ln in ln_map: ln_org = ln ln =", "obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt = (subject,", "time import requests import json import csv from rdflib import", "lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj and \"ja\"", "(subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path, format='json-ld') all.serialize(destination=path.replace(\".json\",", "places: places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1]) }", "== \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix", "import URIRef, BNode, Literal, Graph from rdflib.namespace import RDF, RDFS,", "= ln ln = ln_map[ln] if len(ln) > 20: continue", "= uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln =", "elif prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt)", "open(wiki_path) as 
f: wiki = json.load(f) # sameAs stmt =", "= {} if os.path.exists(db_path): with open(db_path) as f: db =", "db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in", "ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\"", "wiki = {} if os.path.exists(db_path): with open(db_path) as f: db", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt", "\"../data/index.json\" with open(st_path) as f: result = json.load(f) uris =", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt =", "obj[field] for value in values: uri = \"chname:\"+value if field", "import os import json import glob import yaml import sys", "stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] '''", "if field == \"spatial\": uri = \"place:\"+value if uri not", "tmp[0] suffix = tmp[1] ln = suffix ln_org = \"\"", "= Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path", "= (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj:", "RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\"", "import glob import yaml import sys import urllib import ssl", "= (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\"", "} all.add(stmt) ''' # 正規化前 if ln_org != \"\" and", "URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt = (subject, RDF.type,", "in uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix", "if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt =", "if db_uri not in db: print(\"not\" , db_uri) continue #", "import yaml import sys import urllib import ssl import csv", "\"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as f:", "print(\"not\" , db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if", "all.add(stmt) elif prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\"))", "prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif", "RDFS, FOAF, XSD from rdflib import Namespace all = Graph()", "stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path,", "\"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt = (subject,", "\"\" and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org))", "wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db =", "= db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\"", "uri = \"chname:\"+value if field == \"spatial\": uri = \"place:\"+value", "all.add(stmt) elif prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\"))", "= 
{ \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) '''", "\"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix ==", "if os.path.exists(db_path): with open(db_path) as f: db = json.load(f) if", "== \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix", "float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if", "if prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt)", "Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj and", "db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki", "(subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels =", "= label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value", "if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in", "= json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db:", "rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib import Namespace", "obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") #", "open(wiki_path) as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if", "and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt)", "(subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt =", "URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for", "= obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"] if", "os.path.exists(db_path): with open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path):", "in obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \")", "Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD from", "value = label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in", "\"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels:", "field == \"spatial\": uri = \"place:\"+value if uri not in", "= (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path", "prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif", "URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt = (subject, RDF.type,", "not in db: print(\"not\" , db_uri) continue # ###### subject", "float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org", "RDF, RDFS, FOAF, XSD from rdflib import Namespace all =", "\"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in", "json.load(f) st_path = \"../data/index.json\" with open(st_path) as f: result =", "ln ln = ln_map[ln] if len(ln) > 20: continue #", "if len(ln) 
> 20: continue # ln = obj[\"uri\"].split(\":\")[1] '''", "obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"] if \"http://dbpedia.org\"", "ln = ln_map[ln] if len(ln) > 20: continue # ln", "\"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix ==", "import csv from rdflib import URIRef, BNode, Literal, Graph from", "for label in labels: if label[\"lang\"] == \"ja\": stmt =", "import requests import json import csv from rdflib import URIRef,", "import time import requests import json import csv from rdflib", "len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt =", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt", "\"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj", "(subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix] =", "uris: uris.append(uri) for uri in uris: print(uri) tmp = uri.split(\":\")", "###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt =", "rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace import RDF,", "all.add(stmt) ''' # 正規化前 if ln_org != \"\" and ln", "= \"place:\"+value if uri not in uris: uris.append(uri) for uri", "for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"]))", "all.add(stmt) elif prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\"))", ": float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if", "= obj[field] for value in values: uri = \"chname:\"+value if", "geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt", "= \"../data/index.json\" with open(st_path) as f: result = json.load(f) uris", "URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in", "Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for", "位置情報 ''' if \"point\" in obj and prefix == \"place\":", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt = (subject,", "in result: fields = [\"spatial\", \"agential\"] for field in fields:", "not in places: places[suffix] = { \"lat\" : float(value[0]), \"long\":", "labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] ==", "as f: ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path)", "= (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\":", "URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\"))", "for uri in uris: print(uri) tmp = uri.split(\":\") prefix =", "all.add(stmt) elif prefix == \"event\": stmt = (subject, 
RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\"))", "{} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f)", "\"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject,", "prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri", "obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"],", "import json import csv from rdflib import URIRef, BNode, Literal,", "\"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki = {}", "tmp = uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln", "正規化前 if ln_org != \"\" and ln != ln_org: stmt", "\"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" , db_uri) continue", "(subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if", "= suffix ln_org = \"\" if ln in ln_map: ln_org", "if \"point\" in obj and prefix == \"place\": value =", "obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt =", "field in fields: values = obj[field] for value in values:", "in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\"", "elif prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt)", "fields = [\"spatial\", \"agential\"] for field in fields: values =", "open(\"data/dict.json\") as f: ln_map = json.load(f) st_path = \"../data/index.json\" with", "\"agential\"] for field in fields: values = obj[field] for value", "in obj and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label,", "lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for", "URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt = (subject, RDF.type,", "shutil import os import json import glob import yaml import", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt =", "wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki", "# ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt", "ln_map: ln_org = ln ln = ln_map[ln] if len(ln) >", "wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and \"ja\" in", "\"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels:", "stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places:", "f: wiki = json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"),", "rdflib import Namespace all = Graph() with open(\"data/dict.json\") as f:", "== \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix", "!= \"\" and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"),", "= \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as", "{} wiki = {} if os.path.exists(db_path): with open(db_path) as f:", "in obj[\"labels\"]: stmt = 
(subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln =", "values = obj[field] for value in values: uri = \"chname:\"+value", "stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj:", "in labels: value = label[\"value\"] if \"http://dbpedia.org\" in value or", "ln_org != \"\" and ln != ln_org: stmt = (subject,", "import csv import time import requests import json import csv", "stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj:", "open(st_path) as f: result = json.load(f) uris = [] for", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri]", "ln_org = ln ln = ln_map[ln] if len(ln) > 20:", "\"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix ==", "= URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject, RDF.type,", "f: ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path) as", "uris.append(uri) for uri in uris: print(uri) tmp = uri.split(\":\") prefix", "\"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix ==", "stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if", "ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {}", "Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path", "in uris: uris.append(uri) for uri in uris: print(uri) tmp =", "ln = suffix ln_org = \"\" if ln in ln_map:", "as f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as", "= obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\" :", "f: result = json.load(f) uris = [] for obj in", "Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path =", "== \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\"", "XSD from rdflib import Namespace all = Graph() with open(\"data/dict.json\")", "= addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt =", "all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label", "json import csv from rdflib import URIRef, BNode, Literal, Graph", "db: print(\"not\" , db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln)", "{} if os.path.exists(db_path): with open(db_path) as f: db = json.load(f)", "= tmp[0] suffix = tmp[1] ln = suffix ln_org =", "\"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt)", "= (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt", "URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt = (subject, RDF.type,", "db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki", "tmp[1] ln = suffix ln_org = \"\" 
if ln in", "if ln_org != \"\" and ln != ln_org: stmt =", "in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if", "URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in obj", "(subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in", "in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt", "\"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if os.path.exists(db_path): with", "prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif", "obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path):", "all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj", "obj and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"]))", "= (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix]", "stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if", "ln_org = \"\" if ln in ln_map: ln_org = ln", "# label if \"labels\" in obj and \"ja\" in obj[\"labels\"]:", "in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value:", "wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if", "places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt)", "\"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({", "wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path)", "URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix] = {", "uri not in uris: uris.append(uri) for uri in uris: print(uri)", "if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) #", ", db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix", "db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix ==", "if \"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt =", "labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject,", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\":", "if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) db_uri", "with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path = \"../data/index.json\"", "in db: print(\"not\" , db_uri) continue # ###### subject =", "suffix ln_org = \"\" if ln in ln_map: ln_org =", "''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {}", "prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif", "if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt)", "\"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in", "label in labels: if label[\"lang\"] == \"ja\": stmt = (subject,", "import RDF, RDFS, FOAF, XSD from rdflib import Namespace all", "in value or \"www.wikidata.org\" in value: 
stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"),", "\"place:\"+value if uri not in uris: uris.append(uri) for uri in", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt =", "= ln_map[ln] if len(ln) > 20: continue # ln =", "and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt)", "if uri not in uris: uris.append(uri) for uri in uris:", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\":", "fields: values = obj[field] for value in values: uri =", "elif prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt)", "= \"chname:\"+value if field == \"spatial\": uri = \"place:\"+value if", "= obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"),", "json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\"", "label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt)", "\"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix ==", "for label in labels: value = label[\"value\"] if \"http://dbpedia.org\" in", "\"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) #", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt", "open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path)", "(subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in", "all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label", "''' # 正規化前 if ln_org != \"\" and ln !=", "db = {} wiki = {} if os.path.exists(db_path): with open(db_path)", "json.load(f) uris = [] for obj in result: fields =", "requests import json import csv from rdflib import URIRef, BNode,", "\"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前", "json import glob import yaml import sys import urllib import", "from rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib import", "as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri", "ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\"", "json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj", "obj in result: fields = [\"spatial\", \"agential\"] for field in", "URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt = (subject,", "Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib", "with open(wiki_path) as f: wiki = json.load(f) # sameAs stmt", "sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]]", "= {} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki =", "ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path", "print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix = tmp[1]", 
"\"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln", "if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in", "prefix = tmp[0] suffix = tmp[1] ln = suffix ln_org", "= (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\":", "or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt)", "= obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] == \"ja\":", "labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\"", "# sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj =", "(subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels =", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj =", "= json.load(f) uris = [] for obj in result: fields", "value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt =", "if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in", "= json.load(f) st_path = \"../data/index.json\" with open(st_path) as f: result", "in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) #", "label in labels: value = label[\"value\"] if \"http://dbpedia.org\" in value", "float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not", "prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif", "RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"]", "URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj", "ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path) as f:", "= \"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if os.path.exists(db_path):", "URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels =", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt = (subject,", "URIRef, BNode, Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF,", "geoUri) if suffix not in places: places[suffix] = { \"lat\"", "\"point\" in obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\"", "with open(st_path) as f: result = json.load(f) uris = []", "os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) db_uri =", "!= ln_org: stmt = (subject, 
URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path =", "subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject,", "description if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt", "db_uri not in db: print(\"not\" , db_uri) continue # ######", "os import json import glob import yaml import sys import", "# ###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri))", "# ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki =", "}) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt =", "as f: result = json.load(f) uris = [] for obj", "suffix not in places: places[suffix] = { \"lat\" : float(value[0]),", "all.add(stmt) # 位置情報 ''' if \"point\" in obj and prefix", "value or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value))", "obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] == \"ja\": stmt", "# 位置情報 ''' if \"point\" in obj and prefix ==", "''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with", "= {} wiki = {} if os.path.exists(db_path): with open(db_path) as", "f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt = (subject,", "uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix =", "obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"],", "= (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\"", "URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"]", "uris = [] for obj in result: fields = [\"spatial\",", "not in uris: uris.append(uri) for uri in uris: print(uri) tmp", "== \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix", "\"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt = (subject,", "value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 '''", "csv import time import requests import json import csv from", "all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"]))", "= \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" , db_uri)", "FOAF, XSD from rdflib import Namespace all = Graph() with", "stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] #", "URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt = (subject, RDF.type,", "addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) })", "URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = 
wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\"", "URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path, format='json-ld') all.serialize(destination=path.replace(\".json\", \".rdf\"),", "all.add(stmt) # ###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"),", "= [] for obj in result: fields = [\"spatial\", \"agential\"]", "URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt = (subject, RDF.type,", "= json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki =", "all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path =", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt = (subject,", "label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or", "in ln_map: ln_org = ln ln = ln_map[ln] if len(ln)", "{ \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' #", "for value in values: uri = \"chname:\"+value if field ==", "ln_map[ln] if len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1]", "uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln = suffix", "in obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"),", "import ssl import csv import time import requests import json", "values: uri = \"chname:\"+value if field == \"spatial\": uri =", "import Namespace all = Graph() with open(\"data/dict.json\") as f: ln_map", "obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]),", "in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if", "BNode, Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD", "from rdflib import Namespace all = Graph() with open(\"data/dict.json\") as", "= wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db", "and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\"))", "[] for obj in result: fields = [\"spatial\", \"agential\"] for", "\"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix", "if ln in ln_map: ln_org = ln ln = ln_map[ln]", "import sys import urllib import ssl import csv import time", "[\"spatial\", \"agential\"] for field in fields: values = obj[field] for", "wiki = json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url))", "db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" ,", "\"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix ==", "import json import glob import yaml import sys import urllib", "uri in uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0]", "with open(wiki_path) as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln", "= wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and \"ja\"", "obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1]", "= json.load(f) # sameAs stmt = (subject, 
URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt)", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\":", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\":", "URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject,", "in places: places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1])", "== \"spatial\": uri = \"place:\"+value if uri not in uris:", "stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in", "obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in" ]
[ "# The first two board answers the ping board_answers =", "in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value =", "@patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry,", "* import bootloader_read_config from commands import * import sys import", "open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs", "in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i in", "\"test.py -p /dev/ttyUSB0 --all\".split() # The first two board answers", "[{'id': i} for i in range(3)] write_command_retry.return_value = { i:", "sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The first two", "packb({'id': i}) for i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value,", "for i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value, encode_ping(), list(range(1,", "print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self,", "\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The first", "the ping board_answers = [(b'', [0], i) for i in", "ping board_answers = [(b'', [0], i) for i in range(1,", "@patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py", "0 1 2\".split() configs = [{'id': i} for i in", "* except ImportError: from mock import * from msgpack import", "1, 2]) all_configs = {i: configs[i] for i in range(3)}", "import * import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command')", "i in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(),", "{i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection')", "configs = [{'id': i} for i in range(3)] write_command_retry.return_value =", "= [(b'', [0], i) for i in range(1, 3)] +", "Checks if we can perform a whole network discovery. \"\"\"", "commands import * import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry')", "print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0", "@patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv", "import * except ImportError: from mock import * from msgpack", "first two board answers the ping board_answers = [(b'', [0],", "= \"test.py -p /dev/ttyUSB0 --all\".split() # The first two board", "discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The", "= { i: packb({'id': i}) for i in range(1, 3)", "2\".split() configs = [{'id': i} for i in range(3)] write_command_retry.return_value", "object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i:", "i in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value", "except ImportError: from mock import * from msgpack import *", "print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we can", "sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def", "3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i:", "ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command,", "for i in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for", "write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i] for", "The first two board answers the ping board_answers = [(b'',", "[(b'', [0], i) for i in range(1, 3)] + [None]", "/dev/ttyUSB0 --all\".split() # The first two board answers the ping", "--all\".split() # The first two board answers the ping board_answers", "test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0", "for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command')", "import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self,", "a whole network discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0", "from mock import * from msgpack import * import bootloader_read_config", "class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn,", "unittest try: from unittest.mock import * except ImportError: from mock", "range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i in range(3)", "= iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for i", "{ i: packb({'id': i}) for i in range(1, 3) }", "i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams')", "sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs =", "\"\"\" Checks if we can perform a whole network discovery.", "write_command_retry.return_value = { i: packb({'id': i}) for i in range(1,", "for i in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers)", "i}) for i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value, encode_ping(),", "board answers the ping board_answers = [(b'', [0], i) for", "encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i] for i", "two board answers the ping board_answers = [(b'', [0], i)", "/dev/ttyUSB0 0 1 2\".split() configs = [{'id': i} for i", "1 2\".split() configs = [{'id': i} for i in range(3)]", "ImportError: from mock import * from msgpack import * import", "iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for i in", "import bootloader_read_config from commands import * import sys import json", "from commands import * import sys import json class ReadConfigToolTestCase(unittest.TestCase):", "{ i: packb(configs[i]) for i in range(3) } open_conn.return_value =", "@patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks", "i: packb(configs[i]) for i in range(3) } open_conn.return_value = object()", "= { i: packb(configs[i]) for i in range(3) } open_conn.return_value", "in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0,", "json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock,", "range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def", "range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1,", "if we can perform a whole network discovery. 
\"\"\" sys.argv", "i: packb({'id': i}) for i in range(1, 3) } bootloader_read_config.main()", "i} for i in range(3)] write_command_retry.return_value = { i: packb(configs[i])", "open_conn): \"\"\" Checks if we can perform a whole network", "write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split()", "bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i]", "sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram,", "def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if", "for i in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value,", "read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we can perform", "test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we", "range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = {", "i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value, encode_ping(), list(range(1, 128)))", "bootloader_read_config from commands import * import sys import json class", "open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1", "import * from msgpack import * import bootloader_read_config from commands", "-p /dev/ttyUSB0 --all\".split() # The first two board answers the", "import * import bootloader_read_config from commands import * import sys", "whole network discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split()", "import unittest try: from unittest.mock import * except ImportError: from", "unittest.mock import * except ImportError: from mock import * from", "= {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True))", "from msgpack import * import bootloader_read_config from commands import *", "mock import * from msgpack import * import bootloader_read_config from", "write_command, write_command_retry, open_conn): \"\"\" Checks if we can perform a", "indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock,", "* from msgpack import * import bootloader_read_config from commands import", "packb(configs[i]) for i in range(3) } open_conn.return_value = object() bootloader_read_config.main()", "2]) all_configs = {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs,", "* import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection')", "import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print')", "} open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2])", "in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print')", "write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs", "\"test.py -p /dev/ttyUSB0 0 1 2\".split() configs = [{'id': i}", "answers the ping board_answers = [(b'', [0], i) for i", "[0], i) for i in range(1, 3)] + [None] read_can_datagram.return_value", "def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p", "@patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command,", "-p /dev/ttyUSB0 0 1 2\".split() configs = [{'id': i} for", "= [{'id': i} for i in range(3)] write_command_retry.return_value = {", "perform a whole network discovery. \"\"\" sys.argv = \"test.py -p", "network discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() #", "@patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv =", "i in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i", "write_command_retry.return_value = { i: packb(configs[i]) for i in range(3) }", "from unittest.mock import * except ImportError: from mock import *", "write_command_retry, open_conn): \"\"\" Checks if we can perform a whole", "all_configs = {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4,", "@patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn):", "try: from unittest.mock import * except ImportError: from mock import", "[0, 1, 2]) all_configs = {i: configs[i] for i in", "i) for i in range(1, 3)] + [None] read_can_datagram.return_value =", "can perform a whole network discovery. \"\"\" sys.argv = \"test.py", "msgpack import * import bootloader_read_config from commands import * import", "[None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id': i})", "@patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry):", "+ [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id':", "configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry')", "read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for", "board_answers = [(b'', [0], i) for i in range(1, 3)]", "@patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\"", "= \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs = [{'id':", "= object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs =", "we can perform a whole network discovery. \"\"\" sys.argv =" ]
[ "msg = await diary_embed(username) except LetterboxdError as err: msg =", "err await send_msg(ctx, msg) async def check_if_two_args(ctx): msg = ctx.message.content.split()", "except LetterboxdError as err: msg = err await send_msg(ctx, msg)", "await ctx.send('Sorry, the command crashed. :/') logging.error(ctx.message.content) raise error async", "await review_embed(username, ' '.join(str(i) for i in args)) except LetterboxdError", "bot.process_commands(message) async def update_stats(): while True: await bot.change_presence( activity=discord.Game('!helplb -", "commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException): return else:", "msg = await crew_embed(arg, ctx.invoked_with) except LetterboxdError as err: msg", "%(message)s', datefmt='%m/%d %H:%M:%S') bot = commands.Bot(command_prefix='!', case_insensitive=True) bot.remove_command('help') @bot.event async", "if message.content.startswith('!'): message.content = message.content.replace('’', '').replace('‘', '') await bot.process_commands(message) async", "msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def review(ctx, username, *args): try: msg", "send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress', 'director']) async def crew(ctx, *, arg):", "True elif found_bot_msg: if log_message.content: first_word = log_message.content.split()[0] else: continue", "True) else: msg = await film_embed(arg) except LetterboxdError as err:", "inline=False) help_embed.description = 'Invite Bot | '\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await", "| %(message)s', datefmt='%m/%d %H:%M:%S') bot = commands.Bot(command_prefix='!', case_insensitive=True) bot.remove_command('help') @bot.event", "async def update_stats(): while True: await bot.change_presence( activity=discord.Game('!helplb - {}", "requires the {} permission.'.format( ', '.join(err for err in error.missing_perms)))", "diary(ctx, username): try: msg = await diary_embed(username) except LetterboxdError as", "isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException):", "as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['movie']) async", "await send_msg(ctx, msg) @bot.command() async def diary(ctx, username): try: msg", "list() for command in bot.commands: cmd_list.append('!' + command.name) for alias", "= err await send_msg(ctx, msg) @bot.command(aliases=['movie']) async def film(ctx, *,", "helpers import LetterboxdError from list_ import list_embed from review import", "ctx.invoked_with) except LetterboxdError as err: msg = err await send_msg(ctx,", "command crashed. :/') logging.error(ctx.message.content) raise error async def send_msg(ctx, msg):", "found_bot_msg = True elif found_bot_msg: if log_message.content: first_word = log_message.content.split()[0]", "alias in command.aliases: cmd_list.append('!' 
+ alias) async for log_message in", "help_embed.set_footer( text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key, value in SETTINGS['help'].items():", "from list_ import list_embed from review import review_embed from user", "2 @bot.command(name='list') @commands.check(check_if_two_args) async def list_(ctx, username, *args): try: msg", "while True: await bot.change_presence( activity=discord.Game('!helplb - {} servers'.format( len(bot.guilds)))) await", "< 3: await ctx.send('This command requires 2 parameters.') return len(msg)", "from user import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d", "LetterboxdError as err: msg = err await send_msg(ctx, msg) async", "check_if_two_args(ctx): msg = ctx.message.content.split() if len(msg) < 3: await ctx.send('This", "msg) @bot.command() async def diary(ctx, username): try: msg = await", "bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async def on_message(message): if message.content.startswith('!'): message.content =", "await ctx.send(embed=help_embed) @bot.command() async def user(ctx, username): try: msg =", "async def on_message(message): if message.content.startswith('!'): message.content = message.content.replace('’', '').replace('‘', '')", "the command crashed. :/') logging.error(ctx.message.content) raise error async def send_msg(ctx,", "message.content.startswith('!'): message.content = message.content.replace('’', '').replace('‘', '') await bot.process_commands(message) async def", "err await send_msg(ctx, msg) @bot.command() async def diary(ctx, username): try:", "review_embed from user import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s',", "import discord from discord.ext import commands from config import SETTINGS", "and not found_bot_msg: bot_message = log_message found_bot_msg = True elif", "list_embed(username, ' '.join(str(i) for i in args)) except LetterboxdError as", "import diary_embed from film import film_embed from helpers import LetterboxdError", "logging.info( 'Logged in %d servers as %s' % (len(bot.guilds), bot.user.name))", "continue if first_word in cmd_list: found_usr_cmd = True cmd_message =", "on_command_error(ctx, error): if isinstance(error, commands.MissingRequiredArgument): await ctx.send('This command requires a", "crew(ctx, *, arg): try: msg = await crew_embed(arg, ctx.invoked_with) except", "@commands.check(check_if_two_args) async def review(ctx, username, *args): try: msg = await", "review import review_embed from user import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s", "and ctx.guild.id in SETTINGS['mkdb_servers']: msg = await film_embed(arg, True) else:", "command requires 2 parameters.') return len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args)", "eiga.me ratings for specific servers if ctx.guild and ctx.guild.id in", "def diary(ctx, username): try: msg = await diary_embed(username) except LetterboxdError", "# eiga.me ratings for specific servers if ctx.guild and ctx.guild.id", "async for log_message in ctx.channel.history(limit=30): if log_message.author.id == bot.user.id and", "await ctx.send(embed=msg) else: await ctx.send(msg) # Commands @bot.command() async def", "await list_embed(username, ' '.join(str(i) for i in args)) except LetterboxdError", "- {} servers'.format( len(bot.guilds)))) await sleep(900) @bot.event async def on_command_error(ctx,", "await send_msg(ctx, msg) 
@bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def review(ctx, username, *args):", "msg = await user_embed(username) except LetterboxdError as err: msg =", "elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException): return else: await ctx.send('Sorry,", "msg = err await send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress', 'director']) async", "'\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed) @bot.command() async def user(ctx, username):", "'Invite Bot | '\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed) @bot.command() async", "57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by", "command requires the {} permission.'.format( ', '.join(err for err in", "= err await send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress', 'director']) async def", "if not ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id == ctx.author.id: return await", "if ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']: msg = await film_embed(arg,", "bot.change_presence( activity=discord.Game('!helplb - {} servers'.format( len(bot.guilds)))) await sleep(900) @bot.event async", "diary_embed(username) except LetterboxdError as err: msg = err await send_msg(ctx,", "> 2 @bot.command(name='list') @commands.check(check_if_two_args) async def list_(ctx, username, *args): try:", "log_message found_bot_msg = True elif found_bot_msg: if log_message.content: first_word =", "format='%(asctime)s | %(message)s', datefmt='%m/%d %H:%M:%S') bot = commands.Bot(command_prefix='!', case_insensitive=True) bot.remove_command('help')", "= await film_embed(arg, True) else: msg = await film_embed(arg) except", "send_msg(ctx, msg) async def check_if_two_args(ctx): msg = ctx.message.content.split() if len(msg)", "import review_embed from user import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s |", "= message.content.replace('’', '').replace('‘', '') await bot.process_commands(message) async def update_stats(): while", "a parameter.') elif isinstance(error, commands.BotMissingPermissions): await ctx.send('This command requires the", "= await film_embed(arg) except LetterboxdError as err: msg = err", "await ctx.send('This command requires 2 parameters.') return len(msg) > 2", "= err await send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def delete(ctx):", "err await send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def delete(ctx): await", "import LetterboxdError from list_ import list_embed from review import review_embed", "| '\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed) @bot.command() async def user(ctx,", "first_word = log_message.content.split()[0] else: continue if first_word in cmd_list: found_usr_cmd", "error async def send_msg(ctx, msg): if isinstance(msg, discord.Embed): await ctx.send(embed=msg)", "def check_if_two_args(ctx): msg = ctx.message.content.split() if len(msg) < 3: await", "import commands from config import SETTINGS from crew import crew_embed", "len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args) async def list_(ctx, username, *args):", "arg): try: msg = await 
crew_embed(arg, ctx.invoked_with) except LetterboxdError as", "for err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif", "user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d %H:%M:%S') bot =", "msg): if isinstance(msg, discord.Embed): await ctx.send(embed=msg) else: await ctx.send(msg) #", "from crew import crew_embed from diary import diary_embed from film", "help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png')", "ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']: msg = await film_embed(arg, True)", "crew import crew_embed from diary import diary_embed from film import", "await film_embed(arg) except LetterboxdError as err: msg = err await", "await diary_embed(username) except LetterboxdError as err: msg = err await", "%s' % (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async def on_message(message): if", "cmd_list.append('!' + alias) async for log_message in ctx.channel.history(limit=30): if log_message.author.id", "in cmd_list: found_usr_cmd = True cmd_message = log_message break if", "'director']) async def crew(ctx, *, arg): try: msg = await", "isinstance(error.original, discord.HTTPException): return else: await ctx.send('Sorry, the command crashed. :/')", "arg): try: # eiga.me ratings for specific servers if ctx.guild", "icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key, value in", "as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args)", "ctx.message.delete() found_bot_msg = False found_usr_cmd = False cmd_list = list()", "@bot.command(aliases=['actor', 'actress', 'director']) async def crew(ctx, *, arg): try: msg", "return elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException): return else: await", "def crew(ctx, *, arg): try: msg = await crew_embed(arg, ctx.invoked_with)", "i in args)) except LetterboxdError as err: msg = err", "(len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async def on_message(message): if message.content.startswith('!'): message.content", "async def on_ready(): logging.info( 'Logged in %d servers as %s'", "% (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async def on_message(message): if message.content.startswith('!'):", "commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException): return else: await ctx.send('Sorry, the command", "list_ import list_embed from review import review_embed from user import", "async def send_msg(ctx, msg): if isinstance(msg, discord.Embed): await ctx.send(embed=msg) else:", "activity=discord.Game('!helplb - {} servers'.format( len(bot.guilds)))) await sleep(900) @bot.event async def", "try: msg = await diary_embed(username) except LetterboxdError as err: msg", "sleep(900) @bot.event async def on_command_error(ctx, error): if isinstance(error, commands.MissingRequiredArgument): await", "ctx.send('This command requires 2 parameters.') return len(msg) > 2 @bot.command(name='list')", "crashed. 
:/') logging.error(ctx.message.content) raise error async def send_msg(ctx, msg): if", "def list_(ctx, username, *args): try: msg = await list_embed(username, '", "msg = err await send_msg(ctx, msg) @bot.command(aliases=['movie']) async def film(ctx,", "servers'.format( len(bot.guilds)))) await sleep(900) @bot.event async def on_command_error(ctx, error): if", "msg = await list_embed(username, ' '.join(str(i) for i in args))", "on_message(message): if message.content.startswith('!'): message.content = message.content.replace('’', '').replace('‘', '') await bot.process_commands(message)", "film_embed from helpers import LetterboxdError from list_ import list_embed from", "= ctx.message.content.split() if len(msg) < 3: await ctx.send('This command requires", "help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for", "ctx.send('This command requires a parameter.') elif isinstance(error, commands.BotMissingPermissions): await ctx.send('This", "elif isinstance(error, commands.BotMissingPermissions): await ctx.send('This command requires the {} permission.'.format(", "film(ctx, *, arg): try: # eiga.me ratings for specific servers", "LetterboxdError from list_ import list_embed from review import review_embed from", "discord from discord.ext import commands from config import SETTINGS from", "err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args) async", "name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key,", "*args): try: msg = await review_embed(username, ' '.join(str(i) for i", "username, *args): try: msg = await list_embed(username, ' '.join(str(i) for", "def on_message(message): if message.content.startswith('!'): message.content = message.content.replace('’', '').replace('‘', '') await", "' '.join(str(i) for i in args)) except LetterboxdError as err:", "@bot.command() async def user(ctx, username): try: msg = await user_embed(username)", "value in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False) help_embed.description = 'Invite Bot", "specific servers if ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']: msg =", "send_msg(ctx, msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def review(ctx, username, *args): try:", "commands from config import SETTINGS from crew import crew_embed from", "diary import diary_embed from film import film_embed from helpers import", "err await send_msg(ctx, msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def review(ctx, username,", "servers as %s' % (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async def", "msg = await film_embed(arg) except LetterboxdError as err: msg =", "help_embed.description = 'Invite Bot | '\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed)", "ctx.send(msg) # Commands @bot.command() async def helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54,", "if not cmd_message.author.id == ctx.author.id: return await cmd_message.delete() await bot_message.delete()", "bot.commands: cmd_list.append('!' + command.name) for alias in command.aliases: cmd_list.append('!' 
+", "message.content.replace('’', '').replace('‘', '') await bot.process_commands(message) async def update_stats(): while True:", "command.name) for alias in command.aliases: cmd_list.append('!' + alias) async for", "help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg')", "log_message.content.split()[0] else: continue if first_word in cmd_list: found_usr_cmd = True", "list_embed from review import review_embed from user import user_embed logging.basicConfig(", "'.join(err for err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return", "async def list_(ctx, username, *args): try: msg = await list_embed(username,", "Bot | '\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed) @bot.command() async def", "send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def delete(ctx): await ctx.message.delete() found_bot_msg", "ctx.send('This command requires the {} permission.'.format( ', '.join(err for err", "update_stats(): while True: await bot.change_presence( activity=discord.Game('!helplb - {} servers'.format( len(bot.guilds))))", "def film(ctx, *, arg): try: # eiga.me ratings for specific", "def user(ctx, username): try: msg = await user_embed(username) except LetterboxdError", "= await crew_embed(arg, ctx.invoked_with) except LetterboxdError as err: msg =", "film_embed(arg, True) else: msg = await film_embed(arg) except LetterboxdError as", "found_usr_cmd = False cmd_list = list() for command in bot.commands:", "parameter.') elif isinstance(error, commands.BotMissingPermissions): await ctx.send('This command requires the {}", "await send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def delete(ctx): await ctx.message.delete()", "return len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args) async def list_(ctx, username,", "import film_embed from helpers import LetterboxdError from list_ import list_embed", "SETTINGS from crew import crew_embed from diary import diary_embed from", "msg) @bot.command(aliases=['movie']) async def film(ctx, *, arg): try: # eiga.me", "3: await ctx.send('This command requires 2 parameters.') return len(msg) >", "found_bot_msg = False found_usr_cmd = False cmd_list = list() for", "in %d servers as %s' % (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event", "= await review_embed(username, ' '.join(str(i) for i in args)) except", "alias) async for log_message in ctx.channel.history(limit=30): if log_message.author.id == bot.user.id", "try: # eiga.me ratings for specific servers if ctx.guild and", "log_message.author.id == bot.user.id and not found_bot_msg: bot_message = log_message found_bot_msg", "from discord.ext import commands from config import SETTINGS from crew", "= commands.Bot(command_prefix='!', case_insensitive=True) bot.remove_command('help') @bot.event async def on_ready(): logging.info( 'Logged", "err await send_msg(ctx, msg) @bot.command(aliases=['movie']) async def film(ctx, *, arg):", "if found_usr_cmd: if not ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id == ctx.author.id:", "not found_bot_msg: bot_message = log_message found_bot_msg = True elif found_bot_msg:", "commands.MissingRequiredArgument): await 
ctx.send('This command requires a parameter.') elif isinstance(error, commands.BotMissingPermissions):", "await send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress', 'director']) async def crew(ctx, *,", "discord.HTTPException): return else: await ctx.send('Sorry, the command crashed. :/') logging.error(ctx.message.content)", "error): if isinstance(error, commands.MissingRequiredArgument): await ctx.send('This command requires a parameter.')", "'Logged in %d servers as %s' % (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats())", "else: await ctx.send(msg) # Commands @bot.command() async def helplb(ctx): help_embed", "discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created", "user(ctx, username): try: msg = await user_embed(username) except LetterboxdError as", "helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot',", "await film_embed(arg, True) else: msg = await film_embed(arg) except LetterboxdError", "'.join(str(i) for i in args)) except LetterboxdError as err: msg", "for specific servers if ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']: msg", "else: msg = await film_embed(arg) except LetterboxdError as err: msg", "= True cmd_message = log_message break if found_usr_cmd: if not", "+ command.name) for alias in command.aliases: cmd_list.append('!' + alias) async", "await bot.change_presence( activity=discord.Game('!helplb - {} servers'.format( len(bot.guilds)))) await sleep(900) @bot.event", "Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key, value", "as err: msg = err await send_msg(ctx, msg) async def", "for i in args)) except LetterboxdError as err: msg =", "(commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException): return", "SETTINGS['mkdb_servers']: msg = await film_embed(arg, True) else: msg = await", ":/') logging.error(ctx.message.content) raise error async def send_msg(ctx, msg): if isinstance(msg,", "import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d %H:%M:%S') bot", "send_msg(ctx, msg) @bot.command() async def diary(ctx, username): try: msg =", "%H:%M:%S') bot = commands.Bot(command_prefix='!', case_insensitive=True) bot.remove_command('help') @bot.event async def on_ready():", "import sleep import discord from discord.ext import commands from config", "= await list_embed(username, ' '.join(str(i) for i in args)) except", "in args)) except LetterboxdError as err: msg = err await", "else: continue if first_word in cmd_list: found_usr_cmd = True cmd_message", "def on_command_error(ctx, error): if isinstance(error, commands.MissingRequiredArgument): await ctx.send('This command requires", "err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error,", "{} servers'.format( len(bot.guilds)))) await sleep(900) @bot.event async def on_command_error(ctx, error):", "@bot.command(name='list') @commands.check(check_if_two_args) async def list_(ctx, username, *args): try: msg =", 
"ctx.message.content.split() if len(msg) < 3: await ctx.send('This command requires 2", "= err await send_msg(ctx, msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def review(ctx,", "async def diary(ctx, username): try: msg = await diary_embed(username) except", "def review(ctx, username, *args): try: msg = await review_embed(username, '", "False found_usr_cmd = False cmd_list = list() for command in", "diary_embed from film import film_embed from helpers import LetterboxdError from", "log_message.content: first_word = log_message.content.split()[0] else: continue if first_word in cmd_list:", "def update_stats(): while True: await bot.change_presence( activity=discord.Game('!helplb - {} servers'.format(", "@bot.event async def on_message(message): if message.content.startswith('!'): message.content = message.content.replace('’', '').replace('‘',", "return else: await ctx.send('Sorry, the command crashed. :/') logging.error(ctx.message.content) raise", "try: msg = await user_embed(username) except LetterboxdError as err: msg", "= await diary_embed(username) except LetterboxdError as err: msg = err", "async def review(ctx, username, *args): try: msg = await review_embed(username,", "if len(msg) < 3: await ctx.send('This command requires 2 parameters.')", "*, arg): try: msg = await crew_embed(arg, ctx.invoked_with) except LetterboxdError", "as err: msg = err await send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True)", "from film import film_embed from helpers import LetterboxdError from list_", "@bot.command() async def diary(ctx, username): try: msg = await diary_embed(username)", "import list_embed from review import review_embed from user import user_embed", "elif found_bot_msg: if log_message.content: first_word = log_message.content.split()[0] else: continue if", "async def crew(ctx, *, arg): try: msg = await crew_embed(arg,", "review(ctx, username, *args): try: msg = await review_embed(username, ' '.join(str(i)", "def on_ready(): logging.info( 'Logged in %d servers as %s' %", "await ctx.message.delete() found_bot_msg = False found_usr_cmd = False cmd_list =", "if isinstance(error.original, discord.HTTPException): return else: await ctx.send('Sorry, the command crashed.", "by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key, value in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value,", "asyncio import sleep import discord from discord.ext import commands from", "== bot.user.id and not found_bot_msg: bot_message = log_message found_bot_msg =", "@bot.event async def on_command_error(ctx, error): if isinstance(error, commands.MissingRequiredArgument): await ctx.send('This", "logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d %H:%M:%S') bot = commands.Bot(command_prefix='!',", "@commands.bot_has_permissions(manage_messages=True) async def delete(ctx): await ctx.message.delete() found_bot_msg = False found_usr_cmd", "*, arg): try: # eiga.me ratings for specific servers if", "not ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id == ctx.author.id: return await cmd_message.delete()", "datefmt='%m/%d %H:%M:%S') bot = commands.Bot(command_prefix='!', case_insensitive=True) bot.remove_command('help') @bot.event async def", "text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key, value in SETTINGS['help'].items(): help_embed.add_field(name=key,", "bot = commands.Bot(command_prefix='!', 
case_insensitive=True) bot.remove_command('help') @bot.event async def on_ready(): logging.info(", "def send_msg(ctx, msg): if isinstance(msg, discord.Embed): await ctx.send(embed=msg) else: await", "isinstance(msg, discord.Embed): await ctx.send(embed=msg) else: await ctx.send(msg) # Commands @bot.command()", "crew_embed from diary import diary_embed from film import film_embed from", "help_embed.add_field(name=key, value=value, inline=False) help_embed.description = 'Invite Bot | '\\ +", "cmd_list.append('!' + command.name) for alias in command.aliases: cmd_list.append('!' + alias)", "delete(ctx): await ctx.message.delete() found_bot_msg = False found_usr_cmd = False cmd_list", "await user_embed(username) except LetterboxdError as err: msg = err await", "def helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd", "ctx.guild.id in SETTINGS['mkdb_servers']: msg = await film_embed(arg, True) else: msg", "as %s' % (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async def on_message(message):", "ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id == ctx.author.id: return await cmd_message.delete() await", "in command.aliases: cmd_list.append('!' + alias) async for log_message in ctx.channel.history(limit=30):", "try: msg = await crew_embed(arg, ctx.invoked_with) except LetterboxdError as err:", "await crew_embed(arg, ctx.invoked_with) except LetterboxdError as err: msg = err", "await sleep(900) @bot.event async def on_command_error(ctx, error): if isinstance(error, commands.MissingRequiredArgument):", "= await user_embed(username) except LetterboxdError as err: msg = err", "SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False) help_embed.description = 'Invite Bot | '\\", "permission.'.format( ', '.join(err for err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound,", "= discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer(", "the {} permission.'.format( ', '.join(err for err in error.missing_perms))) elif", "ctx.send(embed=msg) else: await ctx.send(msg) # Commands @bot.command() async def helplb(ctx):", "2 parameters.') return len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args) async def", "async def delete(ctx): await ctx.message.delete() found_bot_msg = False found_usr_cmd =", "False cmd_list = list() for command in bot.commands: cmd_list.append('!' 
+", "from review import review_embed from user import user_embed logging.basicConfig( level=logging.INFO,", "log_message in ctx.channel.history(limit=30): if log_message.author.id == bot.user.id and not found_bot_msg:", "@bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def delete(ctx): await ctx.message.delete() found_bot_msg = False", "found_usr_cmd = True cmd_message = log_message break if found_usr_cmd: if", "LetterboxdError as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['actor',", "async def check_if_two_args(ctx): msg = ctx.message.content.split() if len(msg) < 3:", "isinstance(error, commands.BotMissingPermissions): await ctx.send('This command requires the {} permission.'.format( ',", "in SETTINGS['mkdb_servers']: msg = await film_embed(arg, True) else: msg =", "ctx.channel.history(limit=30): if log_message.author.id == bot.user.id and not found_bot_msg: bot_message =", "*args): try: msg = await list_embed(username, ' '.join(str(i) for i", "@bot.command(aliases=['movie']) async def film(ctx, *, arg): try: # eiga.me ratings", "= log_message found_bot_msg = True elif found_bot_msg: if log_message.content: first_word", "isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException): return else: await ctx.send('Sorry, the", "err: msg = err await send_msg(ctx, msg) async def check_if_two_args(ctx):", "len(msg) < 3: await ctx.send('This command requires 2 parameters.') return", "len(bot.guilds)))) await sleep(900) @bot.event async def on_command_error(ctx, error): if isinstance(error,", "msg = ctx.message.content.split() if len(msg) < 3: await ctx.send('This command", "if isinstance(error, commands.MissingRequiredArgument): await ctx.send('This command requires a parameter.') elif", "msg = err await send_msg(ctx, msg) @bot.command() async def diary(ctx,", "msg) async def check_if_two_args(ctx): msg = ctx.message.content.split() if len(msg) <", "err await send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress', 'director']) async def crew(ctx,", "= 'Invite Bot | '\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed) @bot.command()", "msg = await film_embed(arg, True) else: msg = await film_embed(arg)", "parameters.') return len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args) async def list_(ctx,", "await ctx.send('This command requires a parameter.') elif isinstance(error, commands.BotMissingPermissions): await", "on_ready(): logging.info( 'Logged in %d servers as %s' % (len(bot.guilds),", "def delete(ctx): await ctx.message.delete() found_bot_msg = False found_usr_cmd = False", "as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress',", "msg = await review_embed(username, ' '.join(str(i) for i in args))", "'') await bot.process_commands(message) async def update_stats(): while True: await bot.change_presence(", "logging from asyncio import sleep import discord from discord.ext import", "icon_url='https://i.imgur.com/li4cLpd.png') for key, value in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False) help_embed.description", "await ctx.send('This command requires the {} permission.'.format( ', '.join(err for", "cmd_list: found_usr_cmd = True cmd_message = log_message break if found_usr_cmd:", "cmd_list = list() for command in bot.commands: cmd_list.append('!' 
+ command.name)", "sleep import discord from discord.ext import commands from config import", "bot.loop.create_task(update_stats()) @bot.event async def on_message(message): if message.content.startswith('!'): message.content = message.content.replace('’',", "Commands @bot.command() async def helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62))", "import logging from asyncio import sleep import discord from discord.ext", "for log_message in ctx.channel.history(limit=30): if log_message.author.id == bot.user.id and not", "LetterboxdError as err: msg = err await send_msg(ctx, msg) @bot.command()", "msg = err await send_msg(ctx, msg) async def check_if_two_args(ctx): msg", "args)) except LetterboxdError as err: msg = err await send_msg(ctx,", "in bot.commands: cmd_list.append('!' + command.name) for alias in command.aliases: cmd_list.append('!'", "err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['movie']) async def", "'').replace('‘', '') await bot.process_commands(message) async def update_stats(): while True: await", "await bot.process_commands(message) async def update_stats(): while True: await bot.change_presence( activity=discord.Game('!helplb", "for key, value in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False) help_embed.description =", "else: await ctx.send('Sorry, the command crashed. :/') logging.error(ctx.message.content) raise error", "found_usr_cmd: if not ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id == ctx.author.id: return", "bot.remove_command('help') @bot.event async def on_ready(): logging.info( 'Logged in %d servers", "'[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed) @bot.command() async def user(ctx, username): try: msg", "await send_msg(ctx, msg) @bot.command(aliases=['movie']) async def film(ctx, *, arg): try:", "from diary import diary_embed from film import film_embed from helpers", "@bot.event async def on_ready(): logging.info( 'Logged in %d servers as", "= False found_usr_cmd = False cmd_list = list() for command", "await ctx.send(msg) # Commands @bot.command() async def helplb(ctx): help_embed =", "cmd_message = log_message break if found_usr_cmd: if not ctx.author.permissions_in(ctx.channel).manage_messages: if", "Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key, value in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False)", "for command in bot.commands: cmd_list.append('!' + command.name) for alias in", "logging.error(ctx.message.content) raise error async def send_msg(ctx, msg): if isinstance(msg, discord.Embed):", "= False cmd_list = list() for command in bot.commands: cmd_list.append('!'", "= list() for command in bot.commands: cmd_list.append('!' 
+ command.name) for", "', '.join(err for err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)):", "err: msg = err await send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async", "from config import SETTINGS from crew import crew_embed from diary", "if isinstance(msg, discord.Embed): await ctx.send(embed=msg) else: await ctx.send(msg) # Commands", "film import film_embed from helpers import LetterboxdError from list_ import", "async def film(ctx, *, arg): try: # eiga.me ratings for", "if log_message.content: first_word = log_message.content.split()[0] else: continue if first_word in", "= log_message.content.split()[0] else: continue if first_word in cmd_list: found_usr_cmd =", "not cmd_message.author.id == ctx.author.id: return await cmd_message.delete() await bot_message.delete() bot.run(SETTINGS['discord'])", "send_msg(ctx, msg) @bot.command(aliases=['movie']) async def film(ctx, *, arg): try: #", "message.content = message.content.replace('’', '').replace('‘', '') await bot.process_commands(message) async def update_stats():", "review_embed(username, ' '.join(str(i) for i in args)) except LetterboxdError as", "from helpers import LetterboxdError from list_ import list_embed from review", "config import SETTINGS from crew import crew_embed from diary import", "if log_message.author.id == bot.user.id and not found_bot_msg: bot_message = log_message", "= log_message break if found_usr_cmd: if not ctx.author.permissions_in(ctx.channel).manage_messages: if not", "level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d %H:%M:%S') bot = commands.Bot(command_prefix='!', case_insensitive=True)", "True: await bot.change_presence( activity=discord.Game('!helplb - {} servers'.format( len(bot.guilds)))) await sleep(900)", "+ alias) async for log_message in ctx.channel.history(limit=30): if log_message.author.id ==", "@bot.command() async def helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png')", "= True elif found_bot_msg: if log_message.content: first_word = log_message.content.split()[0] else:", "in ctx.channel.history(limit=30): if log_message.author.id == bot.user.id and not found_bot_msg: bot_message", "try: msg = await list_embed(username, ' '.join(str(i) for i in", "# Commands @bot.command() async def helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57,", "await send_msg(ctx, msg) async def check_if_two_args(ctx): msg = ctx.message.content.split() if", "ctx.send(embed=help_embed) @bot.command() async def user(ctx, username): try: msg = await", "LetterboxdError as err: msg = err await send_msg(ctx, msg) @bot.command(name='del')", "command requires a parameter.') elif isinstance(error, commands.BotMissingPermissions): await ctx.send('This command", "break if found_usr_cmd: if not ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id ==", "err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress', 'director'])", "elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original,", "discord.ext import commands from config import SETTINGS from crew import", "msg) @bot.command(aliases=['actor', 'actress', 'director']) async def crew(ctx, *, arg): try:", "True cmd_message = log_message break if found_usr_cmd: if not 
ctx.author.permissions_in(ctx.channel).manage_messages:", "crew_embed(arg, ctx.invoked_with) except LetterboxdError as err: msg = err await", "bot.user.id and not found_bot_msg: bot_message = log_message found_bot_msg = True", "raise error async def send_msg(ctx, msg): if isinstance(msg, discord.Embed): await", "discord.Embed): await ctx.send(embed=msg) else: await ctx.send(msg) # Commands @bot.command() async", "in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False) help_embed.description = 'Invite Bot |", "async def on_command_error(ctx, error): if isinstance(error, commands.MissingRequiredArgument): await ctx.send('This command", "async def helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author(", "ctx.send('Sorry, the command crashed. :/') logging.error(ctx.message.content) raise error async def", "async def user(ctx, username): try: msg = await user_embed(username) except", "command.aliases: cmd_list.append('!' + alias) async for log_message in ctx.channel.history(limit=30): if", "isinstance(error, commands.MissingRequiredArgument): await ctx.send('This command requires a parameter.') elif isinstance(error,", "try: msg = await review_embed(username, ' '.join(str(i) for i in", "servers if ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']: msg = await", "commands.BotMissingPermissions): await ctx.send('This command requires the {} permission.'.format( ', '.join(err", "from asyncio import sleep import discord from discord.ext import commands", "in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError):", "error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if", "value=value, inline=False) help_embed.description = 'Invite Bot | '\\ + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)'", "key, value in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False) help_embed.description = 'Invite", "LetterboxdError as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['movie'])", "%d servers as %s' % (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async", "found_bot_msg: if log_message.content: first_word = log_message.content.split()[0] else: continue if first_word", "msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def delete(ctx): await ctx.message.delete() found_bot_msg =", "bot_message = log_message found_bot_msg = True elif found_bot_msg: if log_message.content:", "as err: msg = err await send_msg(ctx, msg) @bot.command() async", "'actress', 'director']) async def crew(ctx, *, arg): try: msg =", "msg = err await send_msg(ctx, msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def", "@bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def review(ctx, username, *args): try: msg =", "list_(ctx, username, *args): try: msg = await list_embed(username, ' '.join(str(i)", "case_insensitive=True) bot.remove_command('help') @bot.event async def on_ready(): logging.info( 'Logged in %d", "first_word in cmd_list: found_usr_cmd = True cmd_message = log_message break", "for alias in command.aliases: cmd_list.append('!' 
+ alias) async for log_message", "LetterboxdError as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['entry'])", "found_bot_msg: bot_message = log_message found_bot_msg = True elif found_bot_msg: if", "command in bot.commands: cmd_list.append('!' + command.name) for alias in command.aliases:", "film_embed(arg) except LetterboxdError as err: msg = err await send_msg(ctx,", "username): try: msg = await user_embed(username) except LetterboxdError as err:", "requires 2 parameters.') return len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args) async", "send_msg(ctx, msg): if isinstance(msg, discord.Embed): await ctx.send(embed=msg) else: await ctx.send(msg)", "import SETTINGS from crew import crew_embed from diary import diary_embed", "= err await send_msg(ctx, msg) @bot.command() async def diary(ctx, username):", "+ '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)' await ctx.send(embed=help_embed) @bot.command() async def user(ctx, username): try:", "user import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d %H:%M:%S')", "username, *args): try: msg = await review_embed(username, ' '.join(str(i) for", "msg = err await send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def", "err: msg = err await send_msg(ctx, msg) @bot.command() async def", "log_message break if found_usr_cmd: if not ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id", "if first_word in cmd_list: found_usr_cmd = True cmd_message = log_message", "@commands.check(check_if_two_args) async def list_(ctx, username, *args): try: msg = await", "user_embed(username) except LetterboxdError as err: msg = err await send_msg(ctx,", "import crew_embed from diary import diary_embed from film import film_embed", "username): try: msg = await diary_embed(username) except LetterboxdError as err:", "ratings for specific servers if ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']:", "requires a parameter.') elif isinstance(error, commands.BotMissingPermissions): await ctx.send('This command requires", "= err await send_msg(ctx, msg) async def check_if_two_args(ctx): msg =", "commands.Bot(command_prefix='!', case_insensitive=True) bot.remove_command('help') @bot.event async def on_ready(): logging.info( 'Logged in", "{} permission.'.format( ', '.join(err for err in error.missing_perms))) elif isinstance(error,", "62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by Porkepik#2664'," ]
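The bot pulls its token, help text and special-server list from config.SETTINGS. A minimal sketch of the shape that dictionary appears to have: only the key names ('discord', 'help', 'mkdb_servers') come from how the code above uses SETTINGS, every value below is a placeholder.

# Hypothetical config.py sketch -- placeholder values only, keys taken from the usage above.
SETTINGS = {
    'discord': 'YOUR_BOT_TOKEN',            # token passed to bot.run()
    'mkdb_servers': [123456789012345678],   # guild ids that get eiga.me ratings in !film
    'help': {                               # field name -> description shown by !helplb
        '!user <username>': 'Show a Letterboxd profile.',
        '!film <title>': 'Show details for a film.',
    },
}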
[ "= [] for (k, s) in store.list_workflow(): if s ==", "-> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details.", "\"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id is None:", "= workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None: raise", "to fix this later. \"\"\" store = get_global_storage() assert ray.is_initialized()", "of a running workflow. See \"api.get_output()\" for details. \"\"\" assert", "is important to 'ray.get' the returned output. This # ensures", "for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\"", "str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a", "as e: raise ValueError( \"Failed to connect to the workflow", "in status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool) ->", "result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output)", "= ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1:", "from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData,", "workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It", "{workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str])", "\"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage", "the reference of the # workflow output, the caller may", "workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for", "to resume workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run(", "None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter:", "workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job", "try: workflow_manager = get_management_actor() except Exception as e: raise RuntimeError(\"Failed", "== WorkflowStatus.RUNNING and k not in runnings: s = WorkflowStatus.RESUMABLE", "resolve the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if", "ValueError( \"Failed to connect to the workflow management \" \"actor.", "ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return", "resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed:", "= (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important to", "so use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret =", "import asyncio import logging import time from typing import Set,", "= get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def", "run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool = True)", "= workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager", "TYPE_CHECKING import uuid import ray from ray.experimental.workflow import workflow_context from", "= get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running =", "See \"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow", "have workflow id, so use empty one instead store =", "fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id,", "asynchronously. See \"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming", "ValueError as e: raise ValueError( \"Failed to connect to the", "workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus,", "try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid,", "\"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\",", "running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if", "resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) ->", "[] for (k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING", "commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output,", "Otherwise if the actor removes the reference of the #", "logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE:", "r in runnings] runnings = set(runnings) # Here we don't", "[(wid, obj) for (wid, obj) in ret if obj is", "get management actor\") from e async def _resume_one(wid: str) ->", "def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager =", "time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\",", "reference to the workflow # result. 
Otherwise if the actor", "if s == WorkflowStatus.RUNNING and k not in runnings: s", "the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager =", "workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws,", "\"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION)", "status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r", "return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta", "Workflow, workflow_id: Optional[str] = None, overwrite: bool = True) ->", "nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\")", "get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running =", "\"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager", "WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is", "WorkflowStatus.RUNNING and k not in runnings: s = WorkflowStatus.RESUMABLE if", "resume workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid)", "def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See", "result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION:", "workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try: workflow_manager", "= ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id,", "'run()' holds the reference to the workflow # result. Otherwise", "could have already failed. You can use \" \"workflow.resume() to", "the # workflow output, the caller may fail to resolve", "workflow output, the caller may fail to resolve the result.", "try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception:", "import uuid import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow", "= WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return ret", "result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with", "bool = True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. #", "WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] =", "str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await", "inputs. 
def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously.", "You can use \" \"workflow.resume() to resume the workflow.\") from", "raise ValueError( \"Failed to connect to the workflow management \"", "from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import", "try: workflow_manager = get_management_actor() except ValueError: workflow_manager = None if", "ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id:", "entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) #", "-> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager", "get_management_actor() except ValueError as e: raise ValueError( \"Failed to connect", "overwrite existing workflow. # We need to fix this later.", "# workflow output, the caller may fail to resolve the", "except Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid, None)", "from typing import Set, List, Tuple, Optional, TYPE_CHECKING import uuid", "(Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage", "the reference to the workflow # result. Otherwise if the", "store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in", "get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of", "workflow_manager = None if workflow_manager is None: runnings = []", "workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type", "a running workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized()", "[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is", "is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def", "try: workflow_manager = get_management_actor() except ValueError as e: raise ValueError(", "workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in store.list_workflow(): if", "-> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid))", "ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import", "from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from", "\" \"workflow.resume() to resume the workflow.\") from e output =", "ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return", "See \"api.get_output()\" for details. 
\"\"\" assert ray.is_initialized() try: workflow_manager =", "output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str)", "to get management actor\") from e async def _resume_one(wid: str)", "of 'run()' holds the reference to the workflow # result.", "Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj", "output of a running workflow. See \"api.get_output()\" for details. \"\"\"", "returned output. This # ensures caller of 'run()' holds the", "\"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return", "get_management_actor() except Exception as e: raise RuntimeError(\"Failed to get management", "Exception: running = False if running: return WorkflowStatus.RUNNING store =", "return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs.", "= set(runnings) # Here we don't have workflow id, so", "ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str)", "WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING)", "ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType)", "List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed =", "def cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id))", "fix this later. \"\"\" store = get_global_storage() assert ray.is_initialized() if", "with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id)", "get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is", "we don't have workflow id, so use empty one instead", "resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False))", "ret = [] for (k, s) in store.list_workflow(): if s", "str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for", "workflow_manager = get_management_actor() except ValueError: workflow_manager = None if workflow_manager", "Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow import workflow_context", "a workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage =", "WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager = None", "commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type !=", "failed. 
You can use \" \"workflow.resume() to resume the workflow.\")", "Optional[str] = None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run", "= workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing =", "The current \"run\" always overwrite existing workflow. # We need", "List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager =", "flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs. def", "ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE}", "get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger =", "= [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter", "except Exception as e: raise RuntimeError(\"Failed to get management actor\")", "_resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = (", "def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running", "result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type ==", "from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor,", "the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type", "bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED)", "is None: # Workflow ID format: {Entry workflow UUID}.{Unix time", "for details. \"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except", "= f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id,", "return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support", "result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id:", "Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj =", "caller may fail to resolve the result. 
result: \"WorkflowExecutionResult\" =", "flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try: workflow_manager =", "( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj", "Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid, None) ret", "set(runnings) # Here we don't have workflow id, so use", "for (wid, obj) in ret if obj is not None]", "time from typing import Set, List, Tuple, Optional, TYPE_CHECKING import", "store.load_workflow_meta() if meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\")", "if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor()", "-> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed", "# result. Otherwise if the actor removes the reference of", "workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def", "id, so use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret", "return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try:", "\"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return", "workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED))", "running workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try:", "result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id}", "entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output)", "later. \"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id is", "from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def", "TODO(suquark): The current \"run\" always overwrite existing workflow. # We", "to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing))", "We need to fix this later. 
\"\"\" store = get_global_storage()", "workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str,", "= None if workflow_manager is None: runnings = [] else:", "workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor", "{WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager =", "= logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite:", "Tuple, Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow import", "StepType.FUNCTION) # NOTE: It is important to 'ray.get' the returned", "obj except Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid,", "flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger", "is None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if", "return [(wid, obj) for (wid, obj) in ret if obj", "the workflow # result. Otherwise if the actor removes the", "result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job", "workflow. # We need to fix this later. \"\"\" store", "flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get", "already failed. You can use \" \"workflow.resume() to resume the", "have already failed. You can use \" \"workflow.resume() to resume", "ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]:", "import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if", "else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter)", "all_failed])) return [(wid, obj) for (wid, obj) in ret if", "if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id,", "ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor", "format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\"", "the output of a running workflow. See \"api.get_output()\" for details.", "ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import", "storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is important", "list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor()", "return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings)", "created. 
[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the", "store = get_global_storage() assert ray.is_initialized() if workflow_id is None: #", "workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception:", "Set, List, Tuple, Optional, TYPE_CHECKING import uuid import ray from", "import logging import time from typing import Set, List, Tuple,", "workflow_manager is None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote())", "the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id,", "logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool", "meta = store.load_workflow_meta() if meta is None: raise ValueError(f\"No such", "ret.append((k, s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]:", "= get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor()", "e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result:", "None: # Workflow ID format: {Entry workflow UUID}.{Unix time to", "import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from", "== 1: return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings", "WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings) # Here", "empty one instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for", "not in runnings: s = WorkflowStatus.RESUMABLE if s in status_filter:", "the returned output. This # ensures caller of 'run()' holds", "obj) for (wid, obj) in ret if obj is not", "{Entry workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow", "= workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid,", "ensures caller of 'run()' holds the reference to the workflow", "current \"run\" always overwrite existing workflow. # We need to", "workflow_manager = get_or_create_management_actor() # NOTE: It is important to 'ray.get'", "( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult", "= ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid,", "uuid import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import", "get_or_create_management_actor() # NOTE: It is important to 'ray.get' the returned", "ray.is_initialized() if workflow_id is None: # Workflow ID format: {Entry", "# We need to fix this later. \"\"\" store =", "to 'ray.get' the returned output. This # ensures caller of", "need to fix this later. 
\"\"\" store = get_global_storage() assert", "def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool =", "cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except", "wid, obj except Exception: logger.error(f\"Failed to resume workflow {wid}\") return", "def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" =", "(wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in", "-> ray.ObjectRef: \"\"\"Get the output of a running workflow. See", "workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage = get_global_storage()", "UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created.", "workflow could have already failed. You can use \" \"workflow.resume()", "can use \" \"workflow.resume() to resume the workflow.\") from e", "# TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id: str)", "f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url):", "\"run\" always overwrite existing workflow. # We need to fix", "if the actor removes the reference of the # workflow", "from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common", "None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed]))", "to the workflow # result. Otherwise if the actor removes", "removes the reference of the # workflow output, the caller", "Here we don't have workflow id, so use empty one", "get_management_actor() except ValueError: workflow_manager = None if workflow_manager is None:", "filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try:", "workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None: raise ValueError(f\"No", "workflow_id is None: # Workflow ID format: {Entry workflow UUID}.{Unix", "support recovery with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef:", "= False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta", "s == WorkflowStatus.RUNNING and k not in runnings: s =", "ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output)", "Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except", "import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from", "get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__)", "fail to resolve the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(", "workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running: return WorkflowStatus.RUNNING", "= get_or_create_management_actor() # NOTE: It is important to 'ray.get' the", "[] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and", "resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return", "typing import Set, List, Tuple, Optional, TYPE_CHECKING import uuid import", "use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret = []", "running = False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id)", "= get_management_actor() except Exception as e: raise RuntimeError(\"Failed to get", "import Set, List, Tuple, Optional, TYPE_CHECKING import uuid import ray", "True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The", "# Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds}", "{workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]:", "None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING", "ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return", "runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) ==", "ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id,", "connect to the workflow management \" \"actor. The workflow could", "the actor removes the reference of the # workflow output,", "= workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in store.list_workflow():", "# ensures caller of 'run()' holds the reference to the", "= ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running:", "result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to resume workflow", "\"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as", "ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The current \"run\"", "output. This # ensures caller of 'run()' holds the reference", "job created. 
[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint", "f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws =", "workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output)", "to resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name))", "import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str]", "len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r in runnings]", "str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError:", "and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r in", "with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except", "wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try:", "reference of the # workflow output, the caller may fail", "running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if", ") -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError:", "storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws", "runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in", "(wid, _) in all_failed])) return [(wid, obj) for (wid, obj)", "filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except Exception", "workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with", "ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details. \"\"\"", "StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from", "It is important to 'ray.get' the returned output. This #", "\"\"\"Get the output of a running workflow. See \"api.get_output()\" for", "return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set =", "workflow # result. Otherwise if the actor removes the reference", "ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as e: raise", "= ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) ->", "TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow:", "await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except", "asynchronously. 
# TODO(suquark): The current \"run\" always overwrite existing workflow.", "for r in runnings] runnings = set(runnings) # Here we", "ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing", "actor removes the reference of the # workflow output, the", "return wid, obj except Exception: logger.error(f\"Failed to resume workflow {wid}\")", "store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None:", "flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to resume", "Exception as e: raise RuntimeError(\"Failed to get management actor\") from", "return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _)", "management actor\") from e async def _resume_one(wid: str) -> Tuple[str,", "import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import (", "import time from typing import Set, List, Tuple, Optional, TYPE_CHECKING", "in status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for", "= True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark):", "def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output", "Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a running workflow.", "may fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get(", "-> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id))", "async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\"", "WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import", "if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta()", "status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str,", "ray.ObjectRef: \"\"\"Get the output of a running workflow. See \"api.get_output()\"", "# Here we don't have workflow id, so use empty", "!= StepType.FUNCTION) # NOTE: It is important to 'ray.get' the", "ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor)", "from e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try:", "StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark):", "workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid, obj)", "return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef:", "None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a workflow", "def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if", "TODO(suquark): support recovery with ObjectRef inputs. 
def resume(workflow_id: str) ->", "= ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return", "store.list_workflow(): if s == WorkflowStatus.RUNNING and k not in runnings:", "This # ensures caller of 'run()' holds the reference to", "List, Tuple, Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow", "as e: raise RuntimeError(\"Failed to get management actor\") from e", "WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return ret def", "actor\") from e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]:", "workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output)", "f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is important to", "-> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store", "workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running", "details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\")", "raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus]", "ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from", "workflow_manager = get_management_actor() except Exception as e: raise RuntimeError(\"Failed to", "logging import time from typing import Set, List, Tuple, Optional,", "else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef", "Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except", "s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and k not", "s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return", "\"Failed to connect to the workflow management \" \"actor. The", "e: raise RuntimeError(\"Failed to get management actor\") from e async", "1: return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings =", "assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as e:", "= get_management_actor() except ValueError as e: raise ValueError( \"Failed to", "runnings = set(runnings) # Here we don't have workflow id,", "checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager", "workflow id, so use empty one instead store = workflow_storage.get_workflow_storage(\"\")", "of the # workflow output, the caller may fail to", "workflow_manager = get_management_actor() except ValueError as e: raise ValueError( \"Failed", "-> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The current", "the workflow management \" \"actor. 
The workflow could have already", "store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\",", "ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step", "asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid, obj) for", "if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def", "list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as e: raise", "holds the reference to the workflow # result. Otherwise if", "obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed", "get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() #", "WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage", "meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager", "= {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager", "\"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor()", "one instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k,", "ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important", "'ray.get' the returned output. This # ensures caller of 'run()'", "\" \"actor. The workflow could have already failed. You can", "instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s)", "s in status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool)", "ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set)", "k not in runnings: s = WorkflowStatus.RESUMABLE if s in", "\"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\")", "storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager =", "False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta =", "logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): #", "don't have workflow id, so use empty one instead store", "to connect to the workflow management \" \"actor. The workflow", "workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE:", "\"workflow.resume() to resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id,", "# TODO(suquark): The current \"run\" always overwrite existing workflow. 
#", "[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow", "runnings: s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s))", "from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow,", "ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None:", "assert ray.is_initialized() if workflow_id is None: # Workflow ID format:", "resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\"", "(entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important to 'ray.get'", "caller of 'run()' holds the reference to the workflow #", "<gh_stars>0 import asyncio import logging import time from typing import", "== StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) #", "workflow management \" \"actor. The workflow could have already failed.", "logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None,", "wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor()", "workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager =", "such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) ->", "asyncio import logging import time from typing import Set, List,", "in all_failed])) return [(wid, obj) for (wid, obj) in ret", "raise RuntimeError(\"Failed to get management actor\") from e async def", "important to 'ray.get' the returned output. This # ensures caller", "recovery with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume", "workflow asynchronously. # TODO(suquark): The current \"run\" always overwrite existing", "for (wid, _) in all_failed])) return [(wid, obj) for (wid,", "import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage", "Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id", "get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False", "if workflow_manager is None: runnings = [] else: runnings =", "meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status", "to the workflow management \" \"actor. The workflow could have", "import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow,", "in runnings] runnings = set(runnings) # Here we don't have", "workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor()", "= get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It", "all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as", "the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow", "The workflow could have already failed. You can use \"", "_) in all_failed])) return [(wid, obj) for (wid, obj) in", "if meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return", "\"actor. The workflow could have already failed. You can use", "{wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid,", "logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str,", "import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import", "except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) ->", "[(r, WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings) #", "logger.error(f\"Failed to resume workflow {wid}\") return (wid, None) ret =", "except ValueError as e: raise ValueError( \"Failed to connect to", "except Exception: running = False if running: return WorkflowStatus.RUNNING store", "existing workflow. # We need to fix this later. \"\"\"", "a workflow asynchronously. # TODO(suquark): The current \"run\" always overwrite", "job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name:", "if s in status_filter: ret.append((k, s)) return ret def resume_all(with_failed:", "with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a", "get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING:", "name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a running", "s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set", "ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running: return", "output, the caller may fail to resolve the result. result:", "runnings] runnings = set(runnings) # Here we don't have workflow", "None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store =", "if workflow_id is None: # Workflow ID format: {Entry workflow", "return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try: workflow_manager", "None if workflow_manager is None: runnings = [] else: runnings", "from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access", "= store.load_workflow_meta() if meta is None: raise ValueError(f\"No such workflow_id", "get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id:", "to resolve the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id,", "in store.list_workflow(): if s == WorkflowStatus.RUNNING and k not in", "use \" \"workflow.resume() to resume the workflow.\") from e output", "management \" \"actor. The workflow could have already failed. You", "ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] )", "result. Otherwise if the actor removes the reference of the", "RuntimeError(\"Failed to get management actor\") from e async def _resume_one(wid:", "ValueError: workflow_manager = None if workflow_manager is None: runnings =", "overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously.", "# NOTE: It is important to 'ray.get' the returned output.", "if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return [(r,", "flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery", "details. \"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError", "NOTE: It is important to 'ray.get' the returned output. This", "except ValueError: workflow_manager = None if workflow_manager is None: runnings", "in runnings: s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k,", "workflow_id: Optional[str] = None, overwrite: bool = True) -> ray.ObjectRef:", "try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id)", "= flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to", "ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow", "always overwrite existing workflow. # We need to fix this", "to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\"", "name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try:", "this later. \"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id", "= get_global_storage() assert ray.is_initialized() if workflow_id is None: # Workflow", "e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id:", "ID format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id =", "str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get(", "get_global_storage() assert ray.is_initialized() if workflow_id is None: # Workflow ID", "the caller may fail to resolve the result. result: \"WorkflowExecutionResult\"", "output) def cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor()", "for (k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and", "(k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and k", "= None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a", "and k not in runnings: s = WorkflowStatus.RESUMABLE if s", "\"\"\"Run a workflow asynchronously. 
# TODO(suquark): The current \"run\" always", "= list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as e:", "e: raise ValueError( \"Failed to connect to the workflow management", "workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else:", "result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the", "ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id:", "# checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow)", "= get_management_actor() except ValueError: workflow_manager = None if workflow_manager is" ]
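A minimal usage sketch of the module above, assuming Ray and the global workflow storage are already configured and that a workflow with the hypothetical id "my_workflow" was previously checkpointed; the id and printed values are illustrative, not part of the original file:

# Illustrative only: "my_workflow" is a made-up workflow id.
import ray

ray.init()
print(get_status("my_workflow"))                  # e.g. WorkflowStatus.RESUMABLE
for wid, status in list_all({WorkflowStatus.RESUMABLE}):
    print(wid, status)
output_ref = resume("my_workflow")                # returns a ray.ObjectRef
print(ray.get(output_ref))                        # blocks until the workflow finishes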
[ "from .compose import Compose # noqa from .signature import BinarySignature,", "from .signature import BinarySignature, Signature, MemorySignature # noqa from .username", "import Compose # noqa from .signature import BinarySignature, Signature, MemorySignature", "Compose # noqa from .signature import BinarySignature, Signature, MemorySignature #", ".compose import Compose # noqa from .signature import BinarySignature, Signature,", "import BinarySignature, Signature, MemorySignature # noqa from .username import UsernameToken", "Signature, MemorySignature # noqa from .username import UsernameToken # noqa", "noqa from .signature import BinarySignature, Signature, MemorySignature # noqa from", "<reponame>bertonha/python-zeep from .compose import Compose # noqa from .signature import", "# noqa from .signature import BinarySignature, Signature, MemorySignature # noqa", "BinarySignature, Signature, MemorySignature # noqa from .username import UsernameToken #", ".signature import BinarySignature, Signature, MemorySignature # noqa from .username import" ]
[ "x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return", "u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range", "0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y = sol_rk4.T plt.grid() plt.plot(x,y) plt.show()", "coding: utf-8 -*- import numpy as np import matplotlib.pyplot as", "return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in", "1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt", "=y0 for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return", "np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def", "M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt", "as np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y =", "numpy as np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y", "in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t", "k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for", "range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t =", "for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y", "# -*- coding: utf-8 -*- import numpy as np import", "(1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt)", "np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u", "= RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0,", "k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k", "def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan):", "return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6", "k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0", "-*- coding: utf-8 -*- import numpy as np import matplotlib.pyplot", "import numpy as np import matplotlib.pyplot as plt def orbit(u):", "#r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt", "#M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt", "k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:]", "as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06", "<reponame>peteboi/Python-Scripts<gh_stars>0 # -*- coding: utf-8 -*- import numpy as np", "orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3", "RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)): y[k,:]", "y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t)", "= np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y 
= sol_rk4.T", "utf-8 -*- import numpy as np import matplotlib.pyplot as plt", "RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10,", "np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y = sol_rk4.T plt.grid()", "-*- import numpy as np import matplotlib.pyplot as plt def", "dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y", "= u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y])", "def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110", "y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y = sol_rk4.T plt.grid() plt.plot(x,y)", "return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10])", "plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11", "u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def", "y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)): y[k,:] =", "y[0,:] =y0 for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1])", "import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y)", "def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)):", "matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r=", "RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)])", "y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10,", "t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y =", "k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1", "r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt):", "f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return" ]
[ "in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created", "+= factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('-", "model factories for all installed apps' def handle(self, *args, **options):", "import os from django.apps import apps from django.core.management.base import BaseCommand", "from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create model", "from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand):", "factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for", "import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help =", "*args, **options): created_files = [] for app in apps.get_app_configs(): factory_app_generator", "BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create", "factories for all installed apps' def handle(self, *args, **options): created_files", "handle(self, *args, **options): created_files = [] for app in apps.get_app_configs():", "for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files()", "= FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file", "django.apps import apps from django.core.management.base import BaseCommand from factory_generator.generator import", "from django.apps import apps from django.core.management.base import BaseCommand from factory_generator.generator", "import FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories for", "Command(BaseCommand): help = 'Create model factories for all installed apps'", "FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories for all", "django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help", "def handle(self, *args, **options): created_files = [] for app in", "created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files:", "created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('- ' + created_file))", "apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:'))", "FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in", "self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('- ' +", "= [] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files", "installed apps' def handle(self, *args, **options): created_files = [] for", 
"factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories", "apps' def handle(self, *args, **options): created_files = [] for app", "app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully", "help = 'Create model factories for all installed apps' def", "**options): created_files = [] for app in apps.get_app_configs(): factory_app_generator =", "import apps from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator", "<reponame>gamabounty/django-factory-generator import os from django.apps import apps from django.core.management.base import", "class Command(BaseCommand): help = 'Create model factories for all installed", "for all installed apps' def handle(self, *args, **options): created_files =", "all installed apps' def handle(self, *args, **options): created_files = []", "= 'Create model factories for all installed apps' def handle(self,", "[] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files +=", "'Create model factories for all installed apps' def handle(self, *args,", "os from django.apps import apps from django.core.management.base import BaseCommand from", "apps from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class", "created_files = [] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app)", "factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('- '" ]
[ "if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] #", "chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs: cr_io.copytree(csv_dir,", "analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv)", "chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP,", "#TODO Not clear why this stage takes > 1 thread.", "clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5)", "args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs", "cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as", "# import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from", "4 #TODO Not clear why this stage takes > 1", "int max_clusters, out h5 diffexp_h5, out path diffexp_csv, src py", "cr_matrix import cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN", "give big jobs more threads in order to avoid overloading", "reserved. # import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io", "as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as cr_io", "chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in", "= [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for", "key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads,", "def main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) #", "kills it on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION(", "(c) 2017 10X Genomics, Inc. All rights reserved. 
# import", "clustering_h5, in bool skip, in int random_seed, in int max_clusters,", "in string clustering_key, ) \"\"\" def split(args): if args.skip: return", "from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants", "int random_seed, in int max_clusters, out h5 diffexp_h5, out path", "cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as", "import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome", "'join': {'__mem_gb' : 1}} def main(args, outs): if args.skip: return", "= matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters)", "now, only compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE)", "cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key,", "matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene", "import cellranger.matrix as cr_matrix import cellranger.io as cr_io import cellranger.library_constants", "thinks it does and kills it on long jobs __MRO__", "import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io", "\"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in", "in h5 clustering_h5, in bool skip, in int random_seed, in", "order to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN)", "main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For", "with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix,", "cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs,", "join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5", "Copyright (c) 2017 10X Genomics, Inc. All rights reserved. 
#", "return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add", ": 1}} def main(args, outs): if args.skip: return matrix =", "as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis", "node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key", "SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return", "[chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for", "# For now, only compute for gene expression features matrix", "split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = []", "split using ( in string clustering_key, ) \"\"\" def split(args):", "[{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one for", "in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, })", "- give big jobs more threads in order to avoid", "( in string clustering_key, ) \"\"\" def split(args): if args.skip:", "import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear", "Genomics, Inc. All rights reserved. # import cellranger.analysis.diffexp as cr_diffexp", "chunks = [] # FIXME: Add one for reasons unknown", "outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now,", "bool skip, in int random_seed, in int max_clusters, out h5", "on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5", "src py \"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key,", "NUM_THREADS_MIN = 4 #TODO Not clear why this stage takes", "h5 clustering_h5, in bool skip, in int random_seed, in int", "chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5,", "threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key,", "1}} def main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5)", "'__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb'", "in int max_clusters, out h5 diffexp_h5, out path diffexp_csv, src", "min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({", "as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def", "Not clear why this stage takes > 1 thread. Martian", "in bool skip, in int random_seed, in int max_clusters, out", "Martian thinks it does and kills it on long jobs", "in order to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb),", "if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs]", "2017 10X Genomics, Inc. All rights reserved. 
# import cellranger.analysis.diffexp", "cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why", "= 4 #TODO Not clear why this stage takes >", "int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs more threads", "h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import", "diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using ( in string", "key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join':", "def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks =", "clustering_key, ) \"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb':", "cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import", "matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip: return", "chunks, 'join': {'__mem_gb' : 1}} def main(args, outs): if args.skip:", "for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5,", "out h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", )", "chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb' :", ") split using ( in string clustering_key, ) \"\"\" def", "it on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in", "1 thread. Martian thinks it does and kills it on", "1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK -", "matrix_h5, in h5 clustering_h5, in bool skip, in int random_seed,", "__MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5", "args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute", "h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split", "as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why this", "more threads in order to avoid overloading a node threads", "analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import", "why this stage takes > 1 thread. Martian thinks it", "diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using", "10X Genomics, Inc. All rights reserved. 
# import cellranger.analysis.diffexp as", "'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks,", "using ( in string clustering_key, ) \"\"\" def split(args): if", "'__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb' : 1}}", "in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s,", "= 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK", "cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene expression features", "import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants", "as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants", "rights reserved. # import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as", "takes > 1 thread. Martian thinks it does and kills", "for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads':", "SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f:", "outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for", "gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key)", "for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in", "chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in", "clear why this stage takes > 1 thread. 
Martian thinks", "# FIXME: Add one for reasons unknown matrix_mem_gb = 1.8", "a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for", "cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import", "chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out", "return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args, outs):", "4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb,", "= \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5,", "out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using (", "= cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp)", "h5_constants.MIN_MEM_GB)) # HACK - give big jobs more threads in", "string clustering_key, ) \"\"\" def split(args): if args.skip: return {'chunks':", "diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key,", "one for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb", "long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5,", "{'__mem_gb' : 1}} def main(args, outs): if args.skip: return matrix", "compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering =", "features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp =", "thread. Martian thinks it does and kills it on long", "* cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give", "max_clusters, out h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\",", "chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs", "[chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out", "py \"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key, )", "= int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs more", "args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs,", "# Copyright (c) 2017 10X Genomics, Inc. 
All rights reserved.", "chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv", "= [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP])", "as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO", "SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import", "= SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as", "cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4", "return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for", "only compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering", "path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using ( in", "matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix,", "import cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN =", "in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs:", "in h5 matrix_h5, in h5 clustering_h5, in bool skip, in", "chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks':", "NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key':", "big jobs more threads in order to avoid overloading a", "diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip:", "unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB))", "threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in", "this stage takes > 1 thread. Martian thinks it does", "jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in", "analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as cr_io import", "for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb =", "All rights reserved. 
# import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io", "avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads =", "Add one for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5)", "and kills it on long jobs __MRO__ = \"\"\" stage", "import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants", "args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME:", "# HACK - give big jobs more threads in order", "= cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene expression", "diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs):", "lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why this stage", "= [] # FIXME: Add one for reasons unknown matrix_mem_gb", "}) return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args,", "cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if", "def join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s =", "<reponame>qiangli/cellranger<filename>mro/stages/analyzer/run_differential_expression/__init__.py #!/usr/bin/env python # # Copyright (c) 2017 10X Genomics,", "f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args,", "\"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key, ) \"\"\"", "# # Copyright (c) 2017 10X Genomics, Inc. 
All rights", "[] # FIXME: Add one for reasons unknown matrix_mem_gb =", "if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only", "jobs more threads in order to avoid overloading a node", "reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb,", "h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one for reasons", "= min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5):", "outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs: cr_io.copytree(csv_dir, outs.diffexp_csv, allow_existing=True)", "return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs =", "cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as", "for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir", "overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4", "#!/usr/bin/env python # # Copyright (c) 2017 10X Genomics, Inc.", "it does and kills it on long jobs __MRO__ =", "clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp,", "import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix", "cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as", "> 1 thread. Martian thinks it does and kills it", "args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f,", "RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in bool skip,", "skip, in int random_seed, in int max_clusters, out h5 diffexp_h5,", "in int random_seed, in int max_clusters, out h5 diffexp_h5, out", "random_seed, in int max_clusters, out h5 diffexp_h5, out path diffexp_csv,", "as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix", "matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) #", "expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp", "cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not", "Inc. All rights reserved. 
# import cellranger.analysis.diffexp as cr_diffexp import", ") \"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]}", "cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big", "does and kills it on long jobs __MRO__ = \"\"\"", "matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with", "threads in order to avoid overloading a node threads =", "= 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb':", "h5 matrix_h5, in h5 clustering_h5, in bool skip, in int", "For now, only compute for gene expression features matrix =", "stage takes > 1 thread. Martian thinks it does and", "{'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one", "to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads", "chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs]", "threads, }) return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def", "\"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks", "python # # Copyright (c) 2017 10X Genomics, Inc. All", "outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s", "HACK - give big jobs more threads in order to", "{'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args, outs): if", "FIXME: Add one for reasons unknown matrix_mem_gb = 1.8 *", "analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs: cr_io.copytree(csv_dir, outs.diffexp_csv,", "as cr_matrix import cellranger.io as cr_io import cellranger.library_constants as lib_constants", "cellranger.matrix as cr_matrix import cellranger.io as cr_io import cellranger.library_constants as", "stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in bool" ]
[ "permutations as sets. This is used for testing purposes. Since", "import get_symmetric_group_sgs >>> from sympy.combinatorics import Permutation, PermutationGroup >>> g", "size - 2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for", "= v[i] v1.append((base_i, gens_i, [[]]*n_i, sym_i)) size, sbase, sgens =", "of indices equal to the degree of the vertex; indices", "import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S =", "in other.generators] commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for gen", ">>> _cmp_perm_lists(ls1, ls2) True \"\"\" return {tuple(a) for a in", "n_i, sym_i = v[i] v1.append((base_i, gens_i, [[]]*n_i, sym_i)) size, sbase,", "from sympy.combinatorics.testutil import graph_certificate >>> gr1 = {0:[1, 2, 3,", "v.append((base, gens, n, 0)) v.reverse() dummies = list(range(num_indices)) can =", "... AlternatingGroup) >>> from sympy.combinatorics.perm_groups import PermutationGroup >>> from sympy.combinatorics.permutations", "= arg.generators elif hasattr(arg, '__getitem__'): subgr_gens = arg elif hasattr(arg,", "another group. This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups``", "from sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>>", "candidate.order(): return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1:", "of the group and checks for membership in the centralizer.", "= lambda x: all(_af_commutes_with(x, gen) for gen in gens) centralizer_list", "lists of permutations as sets. This is used for testing", "assert sorted(g) == list(range(size)) g = Permutation(g) vlen = [0]*(len(vertices[0])+1)", "+= 1 v = [] for i in range(len(vlen)): n", "symmetry under exchange of two component tensors of type `i`", ">>> from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _cmp_perm_lists", "inside another group. 
This is used for testing ``.centralizer()`` from", "current_stabilizer.order() != candidate.order(): return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order()", "closure is None: closure = group.normal_closure(arg) conjugates = set() if", "def canonicalize_naive(g, dummies, sym, *v): \"\"\" Canonicalize tensor formed by", "from sympy.combinatorics.tensor_can import get_symmetric_group_sgs >>> from sympy.combinatorics import Permutation, PermutationGroup", "2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]}", "the other vertex vertices = [[] for i in items]", "2 assert sorted(g) == list(range(size)) g = Permutation(g) vlen =", "of this type n_i number ot tensors of type `i`", "import PermutationGroup \"\"\" Return a list of elements for the", "= [x._array_form for x in other.generators] commutes_with_gens = lambda x:", "= Permutation.rmul def _cmp_perm_lists(first, second): \"\"\" Compare two lists of", "no symmetry 0 commuting 1 anticommuting Return 0 if the", "other.generators] commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for gen in", "equal to the degree of the vertex; indices are contracted", "{tuple(a) for a in second} def _naive_list_centralizer(self, other, af=False): from", "from sympy.combinatorics import Permutation from sympy.combinatorics.util import _distribute_gens_by_base rmul =", "1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5],", "first} == \\ {tuple(a) for a in second} def _naive_list_centralizer(self,", "centr = group.centralizer(arg) centr_list = list(centr.generate_dimino(af=True)) centr_list_naive = _naive_list_centralizer(group, arg,", "other vertex vertices = [[] for i in items] i", "currently a list, Permutation is not hashable and cannot be", "af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af) def _verify_bsgs(group,", "st.add(q) a = list(st) a.sort() prev = (0,)*size for h", "x in dgens]) dlist = list(D.generate(af=True)) g = g.array_form st", "for more robust testing. Examples ======== >>> from sympy.combinatorics.named_groups import", "sets. This is used for testing purposes. Since the array", "form of a permutation is currently a list, Permutation is", "a group/set/element inside another group. This is used for testing", "neigh in vertices: vlen[len(neigh)] += 1 v = [] for", "i in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order() != candidate.order():", "sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _cmp_perm_lists >>> a", "_naive_list_centralizer(group, arg, af=True) return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None):", "sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S", "testing purposes. Since the array form of a permutation is", "True \"\"\" return {tuple(a) for a in first} == \\", "1: return False return True def _verify_centralizer(group, arg, centr=None): \"\"\"", "in neigh: if pvert[v] < pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i +=", "v2 in neigh: if pvert[v] < pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i", "sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify the normal closure of a", "canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0)) [0, 2,", "from sympy.combinatorics.named_groups import (SymmetricGroup, ... 
AlternatingGroup) >>> from sympy.combinatorics.perm_groups import", "from sympy.combinatorics.named_groups import DihedralGroup >>> D = DihedralGroup(4) >>> _naive_list_centralizer(D,", "el in group.generate_dimino(): for gen in subgr_gens: conjugates.add(gen ^ el)", "0 for v, neigh in items: num_indices += len(neigh) #", "sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items =", "strong generating set relative to it. There are other procedures", "^ el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies,", "base_i, gens_i BSGS for tensors of this type n_i number", "n_i number ot tensors of type `i` sym_i symmetry under", "= list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True) pvert = [x[0] for", "for the graph gr adjacency list The graph is assumed", "set. Examples ======== >>> from sympy.combinatorics.permutations import Permutation >>> from", "verifying a base and strong generating set, but this one", "\"\"\" Verify the normal closure of a subgroup/subset/element in a", "conjugates.add(gen ^ el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g,", "second} def _naive_list_centralizer(self, other, af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\"", "of a subgroup/subset/element in a group. This is used to", "list(self.generate_dimino(af=True)) gens = [x._array_form for x in other.generators] commutes_with_gens =", "2]) >>> ls1 = [a, b, c] >>> ls2 =", ">>> from sympy.combinatorics.testutil import _cmp_perm_lists >>> a = Permutation([0, 2,", "import PermutationGroup \"\"\" Verify the normal closure of a subgroup/subset/element", "elements: if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif hasattr(other, 'getitem'): return", ">>> S = SymmetricGroup(5) >>> A = AlternatingGroup(5) >>> centr", "set. This is a naive implementation using the definition of", "base and strong generating set, but this one will serve", "elif hasattr(arg, '__getitem__'): subgr_gens = arg elif hasattr(arg, 'array_form'): subgr_gens", "AlternatingGroup(3) >>> _verify_normal_closure(S, A, closure=A) True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.normal_closure", "3, 4]} >>> c1 = graph_certificate(gr1) >>> c2 = graph_certificate(gr2)", "v, neigh in items: num_indices += len(neigh) # associate to", "and without external lines. Associate to each vertex of the", "and checks for membership in the centralizer. 
It is used", "gens_i BSGS for tensors of this type n_i number ot", ">>> from sympy.combinatorics.testutil import _verify_bsgs >>> A = AlternatingGroup(4) >>>", "_cmp_perm_lists(ls1, ls2) True \"\"\" return {tuple(a) for a in first}", "canonicalize items = list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True) pvert =", "sympy.combinatorics.testutil import _verify_centralizer >>> S = SymmetricGroup(5) >>> A =", "3, 4])]) >>> _verify_centralizer(S, A, centr) True See Also ========", "AlternatingGroup) >>> from sympy.combinatorics.testutil import _verify_normal_closure >>> S = SymmetricGroup(3)", "(0,)*size for h in a: if h[:-2] == prev[:-2]: if", "import _af_commutes_with if hasattr(other, 'generators'): elements = list(self.generate_dimino(af=True)) gens =", "Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S = SymmetricGroup(5)", "representing the canonical form of the tensor. Examples ======== >>>", "efficient algorithm to get the certificate of a graph. Examples", "17, 13, 19, 20, 21] >>> c1 == c2 True", "the odd index to the other vertex vertices = [[]", "group.centralizer(arg) centr_list = list(centr.generate_dimino(af=True)) centr_list_naive = _naive_list_centralizer(group, arg, af=True) return", "from ``sympy.combinatorics.perm_groups`` Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ...", "_verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify the", "= SymmetricGroup(3) >>> A = AlternatingGroup(3) >>> _verify_normal_closure(S, A, closure=A)", "for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) S", "1] size = num_indices + 2 assert sorted(g) == list(range(size))", "to test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``. Examples ======== >>> from sympy.combinatorics.testutil", "This is not an efficient algorithm to get the certificate", "lambda x: all(_af_commutes_with(x, gen) for gen in gens) centralizer_list =", "Examples ======== >>> from sympy.combinatorics.testutil import canonicalize_naive >>> from sympy.combinatorics.tensor_can", "from sympy.combinatorics.testutil import _cmp_perm_lists >>> a = Permutation([0, 2, 3,", "else: for element in elements: if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list", "= _af_rmul(g, s) for d in dlist: q = tuple(_af_rmul(d,", "in subgr_gens: conjugates.add(gen ^ el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure)", "== \\ {tuple(a) for a in second} def _naive_list_centralizer(self, other,", "candidate = PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order() != candidate.order(): return False current_stabilizer", "canonical form of the tensor gives a certificate for the", "this one will serve for more robust testing. Examples ========", "used to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ======== >>> from sympy.combinatorics.named_groups import", "zero, else return the array form of the permutation representing", "the vertex; indices are contracted when they correspond to the", "tensor are twice the number of lines of the graph", ">>> from sympy.combinatorics.testutil import _verify_normal_closure >>> S = SymmetricGroup(3) >>>", "See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.normal_closure \"\"\" if closure is None: closure", "when they correspond to the same line of the graph.", "be put into a set. 
Examples ======== >>> from sympy.combinatorics.permutations", "not af: for element in elements: if commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else:", "_distribute_gens_by_base rmul = Permutation.rmul def _cmp_perm_lists(first, second): \"\"\" Compare two", "This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ========", "certificate for the graph gr adjacency list The graph is", "of a base and a strong generating set relative to", "centralizer of a subgroup/set/element. This is a brute force implementation", "A = AlternatingGroup(4) >>> A.schreier_sims() >>> _verify_bsgs(A, A.base, A.strong_gens) True", "items: num_indices += len(neigh) # associate to each vertex its", "PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x in dgens]) dlist =", "for the graph. This is not an efficient algorithm to", "current_stabilizer.order() != 1: return False return True def _verify_centralizer(group, arg,", "0)) [0, 2, 1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups", "+= 2 g = [] for v in vertices: g.extend(v)", "= vlen[i] if n: base, gens = get_symmetric_group_sgs(i) v.append((base, gens,", "a = Permutation([0, 2, 3, 4, 1]) >>> b =", "= list(st) a.sort() prev = (0,)*size for h in a:", "0)) v.reverse() dummies = list(range(num_indices)) can = canonicalize(g, dummies, 0,", "a certificate for the graph gr adjacency list The graph", "import canonicalize_naive >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs >>> from sympy.combinatorics", "dgens = dummy_sgs(dummies, sym, size-2) if isinstance(sym, int): num_types =", "======== >>> from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import", ">>> A = AlternatingGroup(5) >>> centr = PermutationGroup([Permutation([0, 1, 2,", "i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) S =", "purposes. Since the array form of a permutation is currently", "= Permutation([0, 2, 3, 4, 1]) >>> b = Permutation([1,", ">>> D = DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2,", "PermutationGroup from sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations import Permutation,", "0, 4, 3]) >>> c = Permutation([3, 4, 0, 1,", "in group.generate_dimino(): for gen in subgr_gens: conjugates.add(gen ^ el) naive_closure", "else: num_types = len(sym) dgens = [] for i in", "for s in S.generate(af=True): h = _af_rmul(g, s) for d", "list The graph is assumed to be unoriented and without", "import _verify_normal_closure >>> S = SymmetricGroup(3) >>> A = AlternatingGroup(3)", "subgroup/set/element. This is a brute force implementation that goes over", "sympy.combinatorics import Permutation from sympy.combinatorics.util import _distribute_gens_by_base rmul = Permutation.rmul", "return {tuple(a) for a in first} == \\ {tuple(a) for", "gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0,", "v, neigh in items: for v2 in neigh: if pvert[v]", "over all elements of the group and checks for membership", "sym_i symmetry under exchange of two component tensors of type", "= h return list(a[0]) def graph_certificate(gr): \"\"\" Return a certificate", "a list of (base_i, gens_i, n_i, sym_i) for tensors of", "== prev[:-2]: if h[-1] != prev[-1]: return 0 prev =", "Permutation is not hashable and cannot be put into a", "line of the graph. 
The canonical form of the tensor", "= PermutationGroup([Permutation(x) for x in dgens]) dlist = list(D.generate(af=True)) g", "component tensors of type `i` None no symmetry 0 commuting", "graph. This is not an efficient algorithm to get the", "= [] for i in range(len(vlen)): n = vlen[i] if", "2, 3, 4])]) >>> _verify_centralizer(S, A, centr) True See Also", "= list(D.generate(af=True)) g = g.array_form st = set() for s", "of the vertex; indices are contracted when they correspond to", "S = SymmetricGroup(5) >>> A = AlternatingGroup(5) >>> centr =", "is a naive implementation using the definition of a base", "= SymmetricGroup(5) >>> A = AlternatingGroup(5) >>> centr = PermutationGroup([Permutation([0,", "test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ...", "this type n_i number ot tensors of type `i` sym_i", "return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v): \"\"\" Canonicalize tensor", "group. This is used to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ======== >>>", "i in range(len(v)): base_i, gens_i, n_i, sym_i = v[i] v1.append((base_i,", "= [] for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size -", "*v): \"\"\" Canonicalize tensor formed by tensors of the different", "with number of indices equal to the degree of the", "graph_certificate(gr1) >>> c2 = graph_certificate(gr2) >>> c1 [0, 2, 4,", "4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]} >>>", "1, 2]) >>> ls1 = [a, b, c] >>> ls2", "as sets. This is used for testing purposes. Since the", "4, 0, 1, 2]) >>> ls1 = [a, b, c]", "the tensor is zero, else return the array form of", "x in items] pvert = _af_invert(pvert) # the indices of", "PermutationGroup([Permutation(x) for x in dgens]) dlist = list(D.generate(af=True)) g =", "other, af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Return a list", "the graph a symmetric tensor with number of indices equal", "is not hashable and cannot be put into a set.", "list of dummy indices msym symmetry of the metric v", "arg, closure=None): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify the normal", "prev[:-2]: if h[-1] != prev[-1]: return 0 prev = h", "the array form of a permutation is currently a list,", "Permutation([0, 2, 3, 4, 1]) >>> b = Permutation([1, 2,", "if current_stabilizer.order() != 1: return False return True def _verify_centralizer(group,", "gens): \"\"\" Verify the correctness of a base and strong", "in items: for v2 in neigh: if pvert[v] < pvert[v2]:", "for v2 in neigh: if pvert[v] < pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1)", "= 1 dummies = [dummies] sym = [sym] else: num_types", "gens) centralizer_list = [] if not af: for element in", "= graph_certificate(gr2) >>> c1 [0, 2, 4, 6, 1, 8,", "import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items = list(gr.items())", "in gens) centralizer_list = [] if not af: for element", "Also ======== sympy.combinatorics.perm_groups.centralizer \"\"\" from sympy.combinatorics.permutations import _af_commutes_with if hasattr(other,", "5]) >>> base2, gens2 = get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3],", "4])]) >>> _verify_centralizer(S, A, centr) True See Also ======== _naive_list_centralizer,", "- 2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x", "is not an efficient algorithm to get 
the certificate of", "4:[1, 3, 5], 5:[0, 2, 3, 4]} >>> c1 =", "(base2, gens2, 2, 0)) [0, 2, 1, 3, 4, 5]", "'getitem'): return _naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self,", "i in items] i = 0 for v, neigh in", "None: closure = group.normal_closure(arg) conjugates = set() if hasattr(arg, 'generators'):", "of a graph. Examples ======== >>> from sympy.combinatorics.testutil import graph_certificate", "tensor. Examples ======== >>> from sympy.combinatorics.testutil import canonicalize_naive >>> from", "pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i += 2 g = [] for", "# the odd index to the other vertex vertices =", "def _verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify", "tensor formed by tensors of the different types g permutation", "from ``sympy.combinatorics.perm_groups``. Examples ======== >>> from sympy.combinatorics.testutil import _naive_list_centralizer >>>", "_naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af)", "8, 10, 12, 3, 14, 16, 18, 5, 9, 15,", "from sympy.combinatorics.testutil import _verify_bsgs >>> A = AlternatingGroup(4) >>> A.schreier_sims()", "el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym,", "definition of a base and a strong generating set relative", "x in other.generators] commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for", "is None: closure = group.normal_closure(arg) conjugates = set() if hasattr(arg,", "in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order() != candidate.order(): return", "2, 0, 4, 3]) >>> c = Permutation([3, 4, 0,", "import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>> D =", "import _verify_centralizer >>> S = SymmetricGroup(5) >>> A = AlternatingGroup(5)", "for a in first} == \\ {tuple(a) for a in", "2, 3, 4, 1]) >>> b = Permutation([1, 2, 0,", "the normal closure of a subgroup/subset/element in a group. This", "= DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2, 3]), Permutation([2,", "= PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v): \"\"\"", "q = tuple(_af_rmul(d, h)) st.add(q) a = list(st) a.sort() prev", "3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0,", "_af_commutes_with if hasattr(other, 'generators'): elements = list(self.generate_dimino(af=True)) gens = [x._array_form", "5], 4:[1, 3, 5], 5:[0, 2, 3, 4]} >>> c1", "[] if not af: for element in elements: if commutes_with_gens(element):", "vertices assign the # even index to the vertex which", "from sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations import Permutation, _af_rmul", "from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify the normal closure of", "Permutation([1, 2, 0, 4, 3]) >>> c = Permutation([3, 4,", "======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup) >>> from", "relative to it. 
There are other procedures for verifying a", "def _cmp_perm_lists(first, second): \"\"\" Compare two lists of permutations as", "of type `i` sym_i symmetry under exchange of two component", "[0]*(len(vertices[0])+1) for neigh in vertices: vlen[len(neigh)] += 1 v =", "ls1 = [a, b, c] >>> ls2 = [b, c,", "S = SymmetricGroup(3) >>> A = AlternatingGroup(3) >>> _verify_normal_closure(S, A,", "generating set relative to it. There are other procedures for", "20, 21] >>> c1 == c2 True \"\"\" from sympy.combinatorics.permutations", ">>> b = Permutation([1, 2, 0, 4, 3]) >>> c", "generating set, but this one will serve for more robust", "is None: centr = group.centralizer(arg) centr_list = list(centr.generate_dimino(af=True)) centr_list_naive =", "types g permutation representing the tensor dummies list of dummy", "\"\"\" from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize", "18, 5, 9, 15, 7, 11, 17, 13, 19, 20,", "return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1: return", "return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import", "{0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3,", "+= len(neigh) # associate to each vertex its indices; for", "sym, *v): \"\"\" Canonicalize tensor formed by tensors of the", "g permutation representing the tensor dummies list of dummy indices", "to it. There are other procedures for verifying a base", "sym_i) for tensors of type `i` base_i, gens_i BSGS for", "5] \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup from sympy.combinatorics.tensor_can import gens_products,", "get_symmetric_group_sgs, canonicalize items = list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True) pvert", "0 prev = h return list(a[0]) def graph_certificate(gr): \"\"\" Return", "1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup from", "# the indices of the tensor are twice the number", "of two component tensors of type `i` None no symmetry", "serve for more robust testing. Examples ======== >>> from sympy.combinatorics.named_groups", "sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup) >>> from sympy.combinatorics.testutil import _verify_normal_closure", "neigh in items: num_indices += len(neigh) # associate to each", "for i in items] i = 0 for v, neigh", "tensors of type `i` sym_i symmetry under exchange of two", "assign the # even index to the vertex which comes", "5], 5:[0, 2, 3, 4]} >>> c1 = graph_certificate(gr1) >>>", "import PermutationGroup from sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations import", "and cannot be put into a set. Examples ======== >>>", "commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else: for element in elements: if commutes_with_gens(element): centralizer_list.append(element)", "closure=None): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify the normal closure", "[[]]*n_i, sym_i)) size, sbase, sgens = gens_products(*v1) dgens = dummy_sgs(dummies,", "is a list of (base_i, gens_i, n_i, sym_i) for tensors", "_naive_list_centralizer(self, other, af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Return a", "vertices: g.extend(v) assert len(g) == num_indices g += [num_indices, num_indices", "base and strong generating set. 
from sympy.combinatorics import Permutation
from sympy.combinatorics.util import _distribute_gens_by_base

rmul = Permutation.rmul


def _cmp_perm_lists(first, second):
    """
    Compare two lists of permutations as sets.

    This is used for testing purposes. Since the array form of a
    permutation is currently a list, Permutation is not hashable
    and cannot be put into a set.

    Examples
    ========

    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _cmp_perm_lists
    >>> a = Permutation([0, 2, 3, 4, 1])
    >>> b = Permutation([1, 2, 0, 4, 3])
    >>> c = Permutation([3, 4, 0, 1, 2])
    >>> ls1 = [a, b, c]
    >>> ls2 = [b, c, a]
    >>> _cmp_perm_lists(ls1, ls2)
    True

    """
    return {tuple(a) for a in first} == \
           {tuple(a) for a in second}


def _naive_list_centralizer(self, other, af=False):
    from sympy.combinatorics.perm_groups import PermutationGroup
    """
    Return a list of elements for the centralizer of a subgroup/set/element.

    This is a brute force implementation that goes over all elements of the
    group and checks for membership in the centralizer. It is used to
    test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import _naive_list_centralizer
    >>> from sympy.combinatorics.named_groups import DihedralGroup
    >>> D = DihedralGroup(4)
    >>> _naive_list_centralizer(D, D)
    [Permutation([0, 1, 2, 3]), Permutation([2, 3, 0, 1])]

    See Also
    ========

    sympy.combinatorics.perm_groups.centralizer

    """
    from sympy.combinatorics.permutations import _af_commutes_with
    if hasattr(other, 'generators'):
        elements = list(self.generate_dimino(af=True))
        gens = [x._array_form for x in other.generators]
        commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for gen in gens)
        centralizer_list = []
        if not af:
            for element in elements:
                if commutes_with_gens(element):
                    centralizer_list.append(Permutation._af_new(element))
        else:
            for element in elements:
                if commutes_with_gens(element):
                    centralizer_list.append(element)
        return centralizer_list
    elif hasattr(other, 'getitem'):
        return _naive_list_centralizer(self, PermutationGroup(other), af)
    elif hasattr(other, 'array_form'):
        return _naive_list_centralizer(self, PermutationGroup([other]), af)


def _verify_bsgs(group, base, gens):
    """
    Verify the correctness of a base and strong generating set.

    This is a naive implementation using the definition of a base and a
    strong generating set relative to it. There are other procedures for
    verifying a base and strong generating set, but this one will serve
    for more robust testing.

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import AlternatingGroup
    >>> from sympy.combinatorics.testutil import _verify_bsgs
    >>> A = AlternatingGroup(4)
    >>> A.schreier_sims()
    >>> _verify_bsgs(A, A.base, A.strong_gens)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims

    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    strong_gens_distr = _distribute_gens_by_base(base, gens)
    current_stabilizer = group
    for i in range(len(base)):
        candidate = PermutationGroup(strong_gens_distr[i])
        if current_stabilizer.order() != candidate.order():
            return False
        current_stabilizer = current_stabilizer.stabilizer(base[i])
    if current_stabilizer.order() != 1:
        return False
    return True


def _verify_centralizer(group, arg, centr=None):
    """
    Verify the centralizer of a group/set/element inside another group.

    This is used for testing ``.centralizer()`` from
    ``sympy.combinatorics.perm_groups``.

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.perm_groups import PermutationGroup
    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _verify_centralizer
    >>> S = SymmetricGroup(5)
    >>> A = AlternatingGroup(5)
    >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])])
    >>> _verify_centralizer(S, A, centr)
    True

    See Also
    ========

    _naive_list_centralizer,
    sympy.combinatorics.perm_groups.PermutationGroup.centralizer,
    _cmp_perm_lists

    """
    if centr is None:
        centr = group.centralizer(arg)
    centr_list = list(centr.generate_dimino(af=True))
    centr_list_naive = _naive_list_centralizer(group, arg, af=True)
    return _cmp_perm_lists(centr_list, centr_list_naive)


def _verify_normal_closure(group, arg, closure=None):
    from sympy.combinatorics.perm_groups import PermutationGroup
    """
    Verify the normal closure of a subgroup/subset/element in a group.

    This is used to test
    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.testutil import _verify_normal_closure
    >>> S = SymmetricGroup(3)
    >>> A = AlternatingGroup(3)
    >>> _verify_normal_closure(S, A, closure=A)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    """
    if closure is None:
        closure = group.normal_closure(arg)
    conjugates = set()
    if hasattr(arg, 'generators'):
        subgr_gens = arg.generators
    elif hasattr(arg, '__getitem__'):
        subgr_gens = arg
    elif hasattr(arg, 'array_form'):
        subgr_gens = [arg]
    for el in group.generate_dimino():
        for gen in subgr_gens:
            conjugates.add(gen ^ el)
    naive_closure = PermutationGroup(list(conjugates))
    return closure.is_subgroup(naive_closure)
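# The following demo is not part of the original module; it is a minimal sketch
# (assuming SymPy is installed) of how the helpers above can be combined to
# cross-check PermutationGroup.centralizer by brute force, mirroring what
# _verify_centralizer does internally. DihedralGroup(6) and its first generator
# are chosen only for illustration; any small group and element would do.
def _demo_centralizer_crosscheck():
    from sympy.combinatorics.named_groups import DihedralGroup
    G = DihedralGroup(6)
    r = G.generators[0]  # one generator of G (a rotation in current SymPy builds)
    fast = list(G.centralizer(r).generate_dimino(af=True))  # library implementation
    slow = _naive_list_centralizer(G, r, af=True)            # brute-force enumeration
    return _cmp_perm_lists(fast, slow)  # expected to be True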
def canonicalize_naive(g, dummies, sym, *v):
    """
    Canonicalize tensor formed by tensors of the different types.

    g  permutation representing the tensor
    dummies  list of dummy indices
    msym  symmetry of the metric

    v is a list of (base_i, gens_i, n_i, sym_i) for tensors of type `i`
    base_i, gens_i  BSGS for tensors of this type
    n_i  number of tensors of type `i`
    sym_i  symmetry under exchange of two component tensors of type `i`
        None  no symmetry
        0     commuting
        1     anticommuting

    Return 0 if the tensor is zero, else return the array form of
    the permutation representing the canonical form of the tensor.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import canonicalize_naive
    >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs
    >>> from sympy.combinatorics import Permutation, PermutationGroup
    >>> g = Permutation([1, 3, 2, 0, 4, 5])
    >>> base2, gens2 = get_symmetric_group_sgs(2)
    >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0))
    [0, 2, 1, 3, 4, 5]
    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    from sympy.combinatorics.tensor_can import gens_products, dummy_sgs
    from sympy.combinatorics.permutations import Permutation, _af_rmul
    v1 = []
    for i in range(len(v)):
        base_i, gens_i, n_i, sym_i = v[i]
        v1.append((base_i, gens_i, [[]]*n_i, sym_i))
    size, sbase, sgens = gens_products(*v1)
    dgens = dummy_sgs(dummies, sym, size-2)
    if isinstance(sym, int):
        num_types = 1
        dummies = [dummies]
        sym = [sym]
    else:
        num_types = len(sym)
    dgens = []
    for i in range(num_types):
        dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2))
    S = PermutationGroup(sgens)
    D = PermutationGroup([Permutation(x) for x in dgens])
    dlist = list(D.generate(af=True))
    g = g.array_form
    st = set()
    for s in S.generate(af=True):
        h = _af_rmul(g, s)
        for d in dlist:
            q = tuple(_af_rmul(d, h))
            st.add(q)
    a = list(st)
    a.sort()
    prev = (0,)*size
    for h in a:
        if h[:-2] == prev[:-2]:
            if h[-1] != prev[-1]:
                return 0
        prev = h
    return list(a[0])
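# The next demo is likewise not part of the original module: a hedged sketch
# comparing canonicalize_naive against the fast canonicalize from
# sympy.combinatorics.tensor_can on the same input as the doctest above; the
# two implementations are expected to agree, which is the point of keeping the
# naive version around for testing.
def _demo_canonicalize_crosscheck():
    from sympy.combinatorics import Permutation
    from sympy.combinatorics.tensor_can import canonicalize, get_symmetric_group_sgs
    base2, gens2 = get_symmetric_group_sgs(2)  # BSGS of a symmetric rank-2 tensor
    g = Permutation([1, 3, 2, 0, 4, 5])        # tensor from the doctest above
    slow = canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0))
    fast = canonicalize(g, [2, 3], 0, (base2, gens2, 2, 0))
    return slow == fast  # expected to be True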
def graph_certificate(gr):
    """
    Return a certificate for the graph

    gr  adjacency list

    The graph is assumed to be unoriented and without external lines.

    Associate to each vertex of the graph a symmetric tensor with
    number of indices equal to the degree of the vertex; indices
    are contracted when they correspond to the same line of the graph.
    The canonical form of the tensor gives a certificate for the graph.

    This is not an efficient algorithm to get the certificate of a graph.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import graph_certificate
    >>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]}
    >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]}
    >>> c1 = graph_certificate(gr1)
    >>> c2 = graph_certificate(gr2)
    >>> c1
    [0, 2, 4, 6, 1, 8, 10, 12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17, 13, 19, 20, 21]
    >>> c1 == c2
    True
    """
    from sympy.combinatorics.permutations import _af_invert
    from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize
    items = list(gr.items())
    items.sort(key=lambda x: len(x[1]), reverse=True)
    pvert = [x[0] for x in items]
    pvert = _af_invert(pvert)

    # the indices of the tensor are twice the number of lines of the graph
    num_indices = 0
    for v, neigh in items:
        num_indices += len(neigh)
    # associate to each vertex its indices; for each line
    # between two vertices assign the even index to the vertex
    # which comes first in items, the odd index to the other vertex
    vertices = [[] for i in items]
    i = 0
    for v, neigh in items:
        for v2 in neigh:
            if pvert[v] < pvert[v2]:
                vertices[pvert[v]].append(i)
                vertices[pvert[v2]].append(i + 1)
                i += 2
    g = []
    for v in vertices:
        g.extend(v)
    assert len(g) == num_indices
    g += [num_indices, num_indices + 1]
    size = num_indices + 2
    assert sorted(g) == list(range(size))
    g = Permutation(g)
    vlen = [0]*(len(vertices[0]) + 1)
    for neigh in vertices:
        vlen[len(neigh)] += 1
    v = []
    for i in range(len(vlen)):
        n = vlen[i]
        if n:
            base, gens = get_symmetric_group_sgs(i)
            v.append((base, gens, n, 0))
    v.reverse()
    dummies = list(range(num_indices))
    can = canonicalize(g, dummies, 0, *v)
    return can
Examples ======== >>> from sympy.combinatorics.testutil import", "16, 18, 5, 9, 15, 7, 11, 17, 13, 19,", "== c2 True \"\"\" from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can", "\"\"\" from sympy.combinatorics.permutations import _af_commutes_with if hasattr(other, 'generators'): elements =", "<reponame>ethankward/sympy<filename>sympy/combinatorics/testutil.py from sympy.combinatorics import Permutation from sympy.combinatorics.util import _distribute_gens_by_base rmul", "conjugates = set() if hasattr(arg, 'generators'): subgr_gens = arg.generators elif", "'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af) def _verify_bsgs(group, base, gens): \"\"\"", "4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2,", ">>> c2 = graph_certificate(gr2) >>> c1 [0, 2, 4, 6,", "from sympy.combinatorics.testutil import _verify_normal_closure >>> S = SymmetricGroup(3) >>> A", "is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ======== >>>", "3, 2, 0, 4, 5]) >>> base2, gens2 = get_symmetric_group_sgs(2)", "1 dummies = [dummies] sym = [sym] else: num_types =", "reverse=True) pvert = [x[0] for x in items] pvert =", "num_indices + 2 assert sorted(g) == list(range(size)) g = Permutation(g)", "the centralizer. It is used to test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``.", "c1 [0, 2, 4, 6, 1, 8, 10, 12, 3,", "PermutationGroup \"\"\" Return a list of elements for the centralizer", "is zero, else return the array form of the permutation", "under exchange of two component tensors of type `i` None", "sympy.combinatorics import Permutation, PermutationGroup >>> g = Permutation([1, 3, 2,", "the vertex which comes first in items, # the odd" ]
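The doctests above already exercise each helper in isolation; the short driver below strings two of them together the same way a test run might. It is only a sketch: it assumes sympy is installed and that the module above is importable as sympy.combinatorics.testutil, and it reuses the adjacency lists from the graph_certificate doctest.

# Usage sketch (not part of the original module).
from sympy.combinatorics.named_groups import AlternatingGroup
from sympy.combinatorics.testutil import _verify_bsgs, graph_certificate

# Verify a base and strong generating set produced by Schreier-Sims.
A = AlternatingGroup(4)
A.schreier_sims()
assert _verify_bsgs(A, A.base, A.strong_gens)

# The two adjacency lists below describe isomorphic graphs,
# so their certificates coincide.
gr1 = {0: [1, 2, 3, 5], 1: [0, 2, 4], 2: [0, 1, 3, 4],
       3: [0, 2, 4], 4: [1, 2, 3, 5], 5: [0, 4]}
gr2 = {0: [1, 5], 1: [0, 2, 3, 4], 2: [1, 3, 5],
       3: [1, 2, 4, 5], 4: [1, 3, 5], 5: [0, 2, 3, 4]}
assert graph_certificate(gr1) == graph_certificate(gr2)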
[ "localhost = \"http://localhost/\" # your local host database = \"mysql://root@localhost/vaticChecker\"", "videos to be considered recaptcha_secret = \"\" # recaptcha secret", "recaptcha secret for verification duplicate_annotations = False # Should the", "to be considered recaptcha_secret = \"\" # recaptcha secret for", "import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove on server import os", "\"http://localhost/\" # your local host database = \"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname", "for verification duplicate_annotations = False # Should the server allow", "allow for duplicate annotations? import os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) #", "minimum number of training videos to be considered recaptcha_secret =", "= \"http://localhost/\" # your local host database = \"mysql://root@localhost/vaticChecker\" #", "training videos to be considered recaptcha_secret = \"\" # recaptcha", "False # Should the server allow for duplicate annotations? import", "import os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove on server", "server://user:pass@localhost/dbname min_training = 2 # the minimum number of training", "# the minimum number of training videos to be considered", "database = \"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname min_training = 2 # the", "considered recaptcha_secret = \"\" # recaptcha secret for verification duplicate_annotations", "verification duplicate_annotations = False # Should the server allow for", "of training videos to be considered recaptcha_secret = \"\" #", "2 # the minimum number of training videos to be", "# TODO: remove on server import os os.environ['PYTHON_EGG_CACHE'] = '/tmp/apache'", "secret for verification duplicate_annotations = False # Should the server", "# recaptcha secret for verification duplicate_annotations = False # Should", "= False # Should the server allow for duplicate annotations?", "duplicate_annotations = False # Should the server allow for duplicate", "sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove on server import os os.environ['PYTHON_EGG_CACHE']", "os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove on server import", "duplicate annotations? import os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove", "= \"\" # recaptcha secret for verification duplicate_annotations = False", "recaptcha_secret = \"\" # recaptcha secret for verification duplicate_annotations =", "be considered recaptcha_secret = \"\" # recaptcha secret for verification", "Should the server allow for duplicate annotations? import os.path import", "your local host database = \"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname min_training =", "server allow for duplicate annotations? 
import os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__)))", "host database = \"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname min_training = 2 #", "# your local host database = \"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname min_training", "= 2 # the minimum number of training videos to", "= \"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname min_training = 2 # the minimum", "local host database = \"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname min_training = 2", "sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove on server import os os.environ['PYTHON_EGG_CACHE'] =", "the server allow for duplicate annotations? import os.path import sys", "for duplicate annotations? import os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO:", "annotations? import os.path import sys sys.path.append(os.path.dirname(os.path.abspath(__file__))) # TODO: remove on", "# server://user:pass@localhost/dbname min_training = 2 # the minimum number of", "\"\" # recaptcha secret for verification duplicate_annotations = False #", "the minimum number of training videos to be considered recaptcha_secret", "number of training videos to be considered recaptcha_secret = \"\"", "\"mysql://root@localhost/vaticChecker\" # server://user:pass@localhost/dbname min_training = 2 # the minimum number", "# Should the server allow for duplicate annotations? import os.path", "min_training = 2 # the minimum number of training videos" ]
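Nothing in the snippet shows how these settings are consumed, so the following is a hypothetical sketch only: it assumes the settings live in a module importable as config, assumes a SQLAlchemy backend for the database URL, and invents a can_submit helper to illustrate how min_training and duplicate_annotations would typically gate a submission.

# Hypothetical consumer of the configuration above (not part of the original file).
import config  # assumed module name for the settings shown above
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# Build the database engine and session factory from the configured URL.
engine = create_engine(config.database)   # e.g. "mysql://root@localhost/vaticChecker"
Session = sessionmaker(bind=engine)

def can_submit(completed_training, is_duplicate):
    """Apply the thresholds defined in the configuration module."""
    if is_duplicate and not config.duplicate_annotations:
        return False
    return completed_training >= config.min_training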
[ "value def now(): \"\"\" Returns an aware or naive datetime.datetime,", "by settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"): del _active.value class override(object):", "by the current time zone, unless another time zone is", "= timezone elif isinstance(timezone, basestring) and pytz is not None:", "the previously active timezone on exit. The ``timezone`` argument must", "None, that will force the value to be converted (or", "zone for the current thread. The ``timezone`` argument must be", "Used only when pytz isn't available. \"\"\" def __repr__(self): return", "The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return", "to be converted (or not), overriding the value of settings.USE_TZ.", "of settings.USE_TZ. This function is designed for use by the", "value is a datetime and converts it to local time", "that uses ``~django.utils.timezone.activate()`` to set the timezone on entry, and", "try: import pytz except ImportError: pytz = None from django.conf", "self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET", "previously active timezone on exit. The ``timezone`` argument must be", "time zone name, pytz is required. If it is ``None``,", "is_aware(value): \"\"\" Determines if a given datetime.datetime is aware. The", "described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not", "The ``timezone`` argument must be an instance of a tzinfo", "naive in a given time zone. \"\"\" value = value.astimezone(timezone)", "current thread. Django will then use the time zone defined", "is a context manager that uses ``~django.utils.timezone.activate()`` to set the", "should_convert = (isinstance(value, datetime) and (settings.USE_TZ if use_tz is None", "\"\"\" should_convert = (isinstance(value, datetime) and (settings.USE_TZ if use_tz is", "\"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self, dt): return ZERO", "return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an aware datetime.datetime", "time implementation taken from Python's docs. Used only when pytz", "provided and is not None, that will force the value", "utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def", "_get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name of ``timezone``. \"\"\"", "current time zone, unless another time zone is specified. \"\"\"", "# wrap the expression in a function and cache the", "a time zone name, or ``None``. If is it a", "This module uses pytz when it's available and fallbacks when", "Unsets the time zone for the current thread. Django will", "not), overriding the value of settings.USE_TZ. This function is designed", "value.tzinfo is None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone):", "if _localtime is None: if isinstance(settings.TIME_ZONE, basestring) and pytz is", "\"\"\" Returns the currently active time zone as a tzinfo", "zones ZERO = timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation taken", "return ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation taken from", "(isinstance(value, datetime) and (settings.USE_TZ if use_tz is None else use_tz)", "is_naive(value) and getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert else", "zone defined by settings.TIME_ZONE. 
\"\"\" if hasattr(_active, \"value\"): del _active.value", "the currently active time zone as a tzinfo instance. \"\"\"", "available, and most likely inaccurate. If you're having trouble with", "return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name", "dt.weekday(), 0, 0) stamp = _time.mktime(tt) tt = _time.localtime(stamp) return", "else: return self.STDOFFSET def dst(self, dt): if self._isdst(dt): return self.DSTDIFF", "= [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware',", "the name of the currently active time zone. \"\"\" return", "avoid accessing the settings at compile time, # wrap the", "manager that uses ``~django.utils.timezone.activate()`` to set the timezone on entry,", "is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone()", "engine. \"\"\" should_convert = (isinstance(value, datetime) and (settings.USE_TZ if use_tz", "time zone name. If it is a time zone name,", "deactivate() else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone", "a given datetime.datetime is naive. The logic is described in", "return ZERO def tzname(self, dt): return \"UTC\" def dst(self, dt):", "active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns", "dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation taken", "time as _time try: import pytz except ImportError: pytz =", "on settings.USE_TZ. \"\"\" if settings.USE_TZ: # timeit shows that datetime.now(tz=utc)", "overriding the value of settings.USE_TZ. This function is designed for", "return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else:", "_get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): \"\"\" Returns the currently", "_active.value # Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value", "and call time.tzset() # because it isn't thread safe. def", "time zones value = timezone.normalize(value) return value def now(): \"\"\"", "__exit__(self, exc_type, exc_value, traceback): if self.old_timezone is not None: _active.value", "If you're having trouble with this class, don't waste your", "name, pytz is required. If it is ``None``, Django enables", "return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): \"\"\" Returns the", "os.environ['TZ'] and call time.tzset() # because it isn't thread safe.", "is not None: _active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone:", "for pytz timezones return timezone.zone except AttributeError: # for regular", "'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC and local", "import time as _time try: import pytz except ImportError: pytz", "receive an invalid value like None. def is_aware(value): \"\"\" Determines", "the current thread. The ``timezone`` argument must be an instance", "objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions.", "subclass, a time zone name, or ``None``. 
If is it", "self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def", "on entry, and restores the previously active timezone on exit.", "None and value.tzinfo.utcoffset(value) is not None def is_naive(value): \"\"\" Determines", "of the currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def", "safe. def activate(timezone): \"\"\" Sets the time zone for the", "pytz. \"\"\" def __init__(self): # This code is moved in", "= local() def get_current_timezone(): \"\"\" Returns the currently active time", "if timezone is None: timezone = get_current_timezone() value = value.astimezone(timezone)", "\"\"\" Converts an aware datetime.datetime to local time. Local time", "is_naive(value): \"\"\" Determines if a given datetime.datetime is naive. The", "default time zone. \"\"\" def __init__(self, timezone): self.timezone = timezone", "\"\"\" if hasattr(timezone, 'localize'): # available for pytz time zones", "for pytz time zones return timezone.localize(value, is_dst=None) else: # may", "See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone)", "\"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name of", "zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): \"\"\"", "their arguments. # The caller should ensure that they don't", "None def make_aware(value, timezone): \"\"\" Makes a naive datetime.datetime in", "= None from django.conf import settings __all__ = [ 'utc',", "that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now()", "else: raise ValueError(\"Invalid timezone: %r\" % timezone) def deactivate(): \"\"\"", "not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert", "zone as a tzinfo instance.\"\"\" # In order to avoid", "taken from Python's docs. Used only when pytz isn't available,", "zone, unless another time zone is specified. \"\"\" if timezone", "change os.environ['TZ'] and call time.tzset() # because it isn't thread", "local time. Local time is defined by the current time", "def get_current_timezone_name(): \"\"\" Returns the name of the currently active", "time zone as a tzinfo instance. \"\"\" return getattr(_active, \"value\",", "self.old_timezone = getattr(_active, 'value', None) def __enter__(self): if self.timezone is", "aware datetime.datetime naive in a given time zone. \"\"\" value", "and getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert else value", "value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an aware datetime.datetime naive", "given time zone aware. \"\"\" if hasattr(timezone, 'localize'): # available", "None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime", "don't receive an invalid value like None. def is_aware(value): \"\"\"", "The caller should ensure that they don't receive an invalid", "any checks on their arguments. # The caller should ensure", "if self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self, dt):", "name of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active", "Returns the name of the default time zone. \"\"\" return", "\"\"\" Determines if a given datetime.datetime is aware. 
The logic", "value.tzinfo.utcoffset(value) is not None def is_naive(value): \"\"\" Determines if a", "use_tz=None): \"\"\" Checks if value is a datetime and converts", "else value # Utilities def localtime(value, timezone=None): \"\"\" Converts an", "in a function and cache the result. _localtime = None", "uses ``~django.utils.timezone.activate()`` to set the timezone on entry, and restores", "def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return", "settings.USE_TZ. This function is designed for use by the template", "they don't receive an invalid value like None. def is_aware(value):", "settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"): del _active.value class override(object): \"\"\"", "time.tzset() # because it isn't thread safe. def activate(timezone): \"\"\"", "expression in a function and cache the result. _localtime =", "def _get_timezone_name(timezone): \"\"\" Returns the name of ``timezone``. \"\"\" try:", "in __init__ to execute it as late as possible #", "specified. \"\"\" if timezone is None: timezone = get_current_timezone() value", "be an instance of a tzinfo subclass or a time", "the expression in a function and cache the result. _localtime", "aware or naive datetime.datetime, depending on settings.USE_TZ. \"\"\" if settings.USE_TZ:", "context manager that uses ``~django.utils.timezone.activate()`` to set the timezone on", "and converts it to local time if necessary. If use_tz", "the current time zone, unless another time zone is specified.", "None. def is_aware(value): \"\"\" Determines if a given datetime.datetime is", "import datetime, timedelta, tzinfo from threading import local import time", "def __repr__(self): return \"<UTC>\" def utcoffset(self, dt): return ZERO def", "will then use the time zone defined by settings.TIME_ZONE. \"\"\"", "value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time zones", "waste your time, just install pytz. \"\"\" def __init__(self): #", "return \"UTC\" def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\"", "zones return timezone.localize(value, is_dst=None) else: # may be wrong around", "ValueError(\"Invalid timezone: %r\" % timezone) def deactivate(): \"\"\" Unsets the", "_active = local() def get_current_timezone(): \"\"\" Returns the currently active", "must be an instance of a ``tzinfo`` subclass, a time", "else use_tz) and not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return", "value = timezone.normalize(value) return value def now(): \"\"\" Returns an", "return value.tzinfo is None or value.tzinfo.utcoffset(value) is None def make_aware(value,", "a time zone name. If it is a time zone", "# The caller should ensure that they don't receive an", "def __init__(self, timezone): self.timezone = timezone self.old_timezone = getattr(_active, 'value',", "the timezone on entry, and restores the previously active timezone", "getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name of", "instance of a tzinfo subclass or a time zone name.", "self.timezone = timezone self.old_timezone = getattr(_active, 'value', None) def __enter__(self):", "another time zone is specified. \"\"\" if timezone is None:", "in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None", "by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime", "datetime.datetime in a given time zone aware. 
\"\"\" if hasattr(timezone,", "instance.\"\"\" # In order to avoid accessing the settings at", "value of settings.USE_TZ. This function is designed for use by", "\"value\"): del _active.value class override(object): \"\"\" Temporarily set the time", ":func:`get_current_timezone`. \"\"\" global _localtime if _localtime is None: if isinstance(settings.TIME_ZONE,", "del _active.value class override(object): \"\"\" Temporarily set the time zone", "return self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt): if self._isdst(dt):", "subclass or a time zone name. If it is a", "on exit. The ``timezone`` argument must be an instance of", "timezone.localize(value, is_dst=None) else: # may be wrong around DST changes", "not None and value.tzinfo.utcoffset(value) is not None def is_naive(value): \"\"\"", "functions don't change os.environ['TZ'] and call time.tzset() # because it", "active timezone on exit. The ``timezone`` argument must be an", "\"<UTC>\" def utcoffset(self, dt): return ZERO def tzname(self, dt): return", "four functions don't perform any checks on their arguments. #", "\"\"\" def __init__(self): # This code is moved in __init__", "is None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not None:", "else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone is", "except ImportError: pytz = None from django.conf import settings __all__", "else UTC() \"\"\"UTC time zone as a tzinfo instance.\"\"\" #", "name of the currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone())", "\"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return", "an aware or naive datetime.datetime, depending on settings.USE_TZ. \"\"\" if", "settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24% slower return", "dst(self, dt): if self._isdst(dt): return self.DSTDIFF else: return ZERO def", "Converts an aware datetime.datetime to local time. Local time is", "] # UTC and local time zones ZERO = timedelta(0)", "zone for the current thread. This is a context manager", "return timezone.localize(value, is_dst=None) else: # may be wrong around DST", "and most likely inaccurate. If you're having trouble with this", "else: _localtime = LocalTimezone() return _localtime # This function exists", "self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return", "default time zone as a tzinfo instance. This is the", "defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime if", "- self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt):", "it is a time zone name, pytz is required. 
\"\"\"", "return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None", "is None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\"", "pytz time zones return timezone.localize(value, is_dst=None) else: # may be", "= timezone.normalize(value) return value def now(): \"\"\" Returns an aware", "if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF", "= (isinstance(value, datetime) and (settings.USE_TZ if use_tz is None else", "= timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation taken from Python's", "'convert_to_local_time', True)) return localtime(value) if should_convert else value # Utilities", "# timeit shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc)", "= _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst > 0 utc", "self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET", "hasattr(timezone, 'localize'): # available for pytz time zones return timezone.localize(value,", "time zone for the current thread. This is a context", "use the time zone defined by settings.TIME_ZONE. \"\"\" if hasattr(_active,", "_localtime = None def get_default_timezone(): \"\"\" Returns the default time", "24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design,", "dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) stamp = _time.mktime(tt) tt", "consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name of", "None: _active.value = self.old_timezone else: del _active.value # Templates def", "Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None and", "None) def __enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone)", "a function and cache the result. _localtime = None def", "'localize'): # available for pytz time zones return timezone.localize(value, is_dst=None)", "Returns the currently active time zone as a tzinfo instance.", "def __enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone) def", "def make_aware(value, timezone): \"\"\" Makes a naive datetime.datetime in a", "current thread. This is a context manager that uses ``~django.utils.timezone.activate()``", "self.STDOFFSET def dst(self, dt): if self._isdst(dt): return self.DSTDIFF else: return", "aware datetime.datetime to local time. Local time is defined by", "helper functions. This module uses pytz when it's available and", "__repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET", "to execute it as late as possible # See get_default_timezone().", "a tzinfo instance. \"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name():", "__init__ to execute it as late as possible # See", "dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def dst(self,", "it a time zone name, pytz is required. If it", "pytz = None from django.conf import settings __all__ = [", "to set the timezone on entry, and restores the previously", "not None: _active.value = self.old_timezone else: del _active.value # Templates", "and is not None, that will force the value to", "perform any checks on their arguments. # The caller should", "is not None and value.tzinfo.utcoffset(value) is not None def is_naive(value):", "aware. 
\"\"\" if hasattr(timezone, 'localize'): # available for pytz time", "= pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime # This", "except AttributeError: # for regular tzinfo objects local_now = datetime.now(timezone)", "By design, these four functions don't perform any checks on", "def dst(self, dt): if self._isdst(dt): return self.DSTDIFF else: return ZERO", "function exists for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns", "\"\"\" Makes a naive datetime.datetime in a given time zone", "when pytz isn't available. \"\"\" def __repr__(self): return \"<UTC>\" def", "def tzname(self, dt): return \"UTC\" def dst(self, dt): return ZERO", "self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self, dt): return", "timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET", "time zone. \"\"\" def __init__(self, timezone): self.timezone = timezone self.old_timezone", "datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. # These functions", "taken from Python's docs. Used only when pytz isn't available.", "= (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0)", "datetime, timedelta, tzinfo from threading import local import time as", "local() def get_current_timezone(): \"\"\" Returns the currently active time zone", "'is_aware', 'make_aware', 'make_naive', ] # UTC and local time zones", "a context manager that uses ``~django.utils.timezone.activate()`` to set the timezone", "available for pytz time zones value = timezone.normalize(value) return value", "tzinfo): _active.value = timezone elif isinstance(timezone, basestring) and pytz is", "active time zone as a tzinfo instance. \"\"\" return getattr(_active,", "exc_value, traceback): if self.old_timezone is not None: _active.value = self.old_timezone", "_active.value class override(object): \"\"\" Temporarily set the time zone for", "activate(timezone): \"\"\" Sets the time zone for the current thread.", "__repr__(self): return \"<UTC>\" def utcoffset(self, dt): return ZERO def tzname(self,", "the current thread. Django will then use the time zone", "or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\" Makes a", "thread. The ``timezone`` argument must be an instance of a", "the time zone defined by settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"):", "an aware datetime.datetime to local time. Local time is defined", "in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None or", "pytz isn't available. \"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self,", "if necessary. If use_tz is provided and is not None,", "is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By", "Python's docs. Used only when pytz isn't available, and most", "tzinfo instance. \"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\"", "\"\"\" Temporarily set the time zone for the current thread.", "tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year,", "order to avoid accessing the settings at compile time, #", "may be wrong around DST changes return value.replace(tzinfo=timezone) def make_naive(value,", "the time zone for the current thread. 
This is a", "ZERO def tzname(self, dt): return \"UTC\" def dst(self, dt): return", "dt.second, dt.weekday(), 0, 0) stamp = _time.mktime(tt) tt = _time.localtime(stamp)", "pytz is not None: _active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid", "__all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive',", "don't perform any checks on their arguments. # The caller", "= timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET =", "an invalid value like None. def is_aware(value): \"\"\" Determines if", "get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else:", "inaccurate. If you're having trouble with this class, don't waste", "default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def", "time zone defined by settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"): del", "name, or ``None``. If is it a time zone name,", "is the time zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`.", "else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self)", "thread safe. def activate(timezone): \"\"\" Sets the time zone for", "use_tz is None else use_tz) and not is_naive(value) and getattr(value,", "slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these", "given time zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'):", "get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for", "\"\"\" return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not", "\"\"\" if isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone, basestring)", "required. If it is ``None``, Django enables the default time", "that they don't receive an invalid value like None. def", "def utcoffset(self, dt): return ZERO def tzname(self, dt): return \"UTC\"", "accessing the settings at compile time, # wrap the expression", "value # Utilities def localtime(value, timezone=None): \"\"\" Converts an aware", "arguments. # The caller should ensure that they don't receive", "tt = _time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc", "is naive. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo", "is None else use_tz) and not is_naive(value) and getattr(value, 'convert_to_local_time',", "pytz time zones value = timezone.normalize(value) return value def now():", "zone name, or ``None``. If is it a time zone", "return \"<UTC>\" def utcoffset(self, dt): return ZERO def tzname(self, dt):", "\"\"\" if hasattr(_active, \"value\"): del _active.value class override(object): \"\"\" Temporarily", "\"\"\" if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24%", "these four functions don't perform any checks on their arguments.", "zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime", "not None, that will force the value to be converted", "pytz is required. If it is ``None``, Django enables the", "or a time zone name. If it is a time", "regular tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone", "fallbacks when it isn't. 
\"\"\" from datetime import datetime, timedelta,", "'normalize'): # available for pytz time zones value = timezone.normalize(value)", "_active.value = self.old_timezone else: del _active.value # Templates def template_localtime(value,", "naive. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\"", "datetime.datetime, depending on settings.USE_TZ. \"\"\" if settings.USE_TZ: # timeit shows", "is aware. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo", "pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\" % timezone) def deactivate():", "local import time as _time try: import pytz except ImportError:", "zone name, pytz is required. \"\"\" if isinstance(timezone, tzinfo): _active.value", "datetime.datetime is aware. The logic is described in Python's docs:", "available and fallbacks when it isn't. \"\"\" from datetime import", "time zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global", "instance of a ``tzinfo`` subclass, a time zone name, or", "pytz.utc if pytz else UTC() \"\"\"UTC time zone as a", "possible # See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET", "if hasattr(_active, \"value\"): del _active.value class override(object): \"\"\" Temporarily set", "result. _localtime = None def get_default_timezone(): \"\"\" Returns the default", "zone for the current thread. Django will then use the", "with this class, don't waste your time, just install pytz.", "is None: deactivate() else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback):", "described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None", "dt): if self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self,", "exists for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the", "you're having trouble with this class, don't waste your time,", "tzinfo from threading import local import time as _time try:", "the name of ``timezone``. \"\"\" try: # for pytz timezones", "elif isinstance(timezone, basestring) and pytz is not None: _active.value =", "not None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return", "aware. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\"", "isn't. \"\"\" from datetime import datetime, timedelta, tzinfo from threading", "def template_localtime(value, use_tz=None): \"\"\" Checks if value is a datetime", "self.DSTDIFF else: return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def", "timezone.tzname(local_now) # Timezone selection functions. # These functions don't change", "and restores the previously active timezone on exit. The ``timezone``", "'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] #", "current thread. The ``timezone`` argument must be an instance of", "if value is a datetime and converts it to local", "_localtime = LocalTimezone() return _localtime # This function exists for", "threading import local import time as _time try: import pytz", "Django will then use the time zone defined by settings.TIME_ZONE.", "the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local()", "functions. 
# These functions don't change os.environ['TZ'] and call time.tzset()", "def get_current_timezone(): \"\"\" Returns the currently active time zone as", "time zone name, or ``None``. If is it a time", "\"\"\" if timezone is None: timezone = get_current_timezone() value =", "# Utilities def localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime", "# Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value is", "timezone): \"\"\" Makes a naive datetime.datetime in a given time", "\"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for", "Django enables the default time zone. \"\"\" def __init__(self, timezone):", "timedelta, tzinfo from threading import local import time as _time", "defined by settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"): del _active.value class", "should ensure that they don't receive an invalid value like", "is provided and is not None, that will force the", "UTC and local time zones ZERO = timedelta(0) class UTC(tzinfo):", "as late as possible # See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone)", "= pytz.utc if pytz else UTC() \"\"\"UTC time zone as", "``~django.utils.timezone.activate()`` to set the timezone on entry, and restores the", "\"\"\" Returns the name of ``timezone``. \"\"\" try: # for", "checks on their arguments. # The caller should ensure that", "value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None def", "= self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def", "currently active time zone as a tzinfo instance. \"\"\" return", "argument must be an instance of a ``tzinfo`` subclass, a", "exc_type, exc_value, traceback): if self.old_timezone is not None: _active.value =", "as possible # See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight:", "= LocalTimezone() return _localtime # This function exists for consistency", "%r\" % timezone) def deactivate(): \"\"\" Unsets the time zone", "\"\"\" UTC implementation taken from Python's docs. Used only when", "return tt.tm_isdst > 0 utc = pytz.utc if pytz else", "If it is a time zone name, pytz is required.", "dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) stamp =", "when it isn't. \"\"\" from datetime import datetime, timedelta, tzinfo", "\"\"\" global _localtime if _localtime is None: if isinstance(settings.TIME_ZONE, basestring)", "from datetime import datetime, timedelta, tzinfo from threading import local", "pytz timezones return timezone.zone except AttributeError: # for regular tzinfo", "is None: timezone = get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone,", "# UTC and local time zones ZERO = timedelta(0) class", "and pytz is not None: _active.value = pytz.timezone(timezone) else: raise", "instance. \"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns", "return self.STDOFFSET def dst(self, dt): if self._isdst(dt): return self.DSTDIFF else:", "as a tzinfo instance. 
\"\"\" return getattr(_active, \"value\", get_default_timezone()) def", "Makes a naive datetime.datetime in a given time zone aware.", "as _time try: import pytz except ImportError: pytz = None", "is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is", "http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None or value.tzinfo.utcoffset(value) is None", "= getattr(_active, 'value', None) def __enter__(self): if self.timezone is None:", "time zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): #", "for the current thread. This is a context manager that", "thread. Django will then use the time zone defined by", "datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these four functions", "_time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst > 0 utc =", "of a ``tzinfo`` subclass, a time zone name, or ``None``.", "depending on settings.USE_TZ. \"\"\" if settings.USE_TZ: # timeit shows that", "only when pytz isn't available, and most likely inaccurate. If", "``timezone``. \"\"\" try: # for pytz timezones return timezone.zone except", "get_current_timezone(): \"\"\" Returns the currently active time zone as a", "\"\"\" Local time implementation taken from Python's docs. Used only", "and pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime", "get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name of the currently", "Returns the name of the currently active time zone. \"\"\"", "zone aware. \"\"\" if hasattr(timezone, 'localize'): # available for pytz", "return timezone.tzname(local_now) # Timezone selection functions. # These functions don't", "'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC and", "required. \"\"\" if isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone,", "if use_tz is None else use_tz) and not is_naive(value) and", "zone name, pytz is required. If it is ``None``, Django", "= None def get_default_timezone(): \"\"\" Returns the default time zone", "pytz when it's available and fallbacks when it isn't. \"\"\"", "= value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time", "def activate(timezone): \"\"\" Sets the time zone for the current", "converts it to local time if necessary. If use_tz is", "See also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime is None:", "return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name of ``timezone``.", "don't waste your time, just install pytz. \"\"\" def __init__(self):", "None: deactivate() else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if", "selection functions. # These functions don't change os.environ['TZ'] and call", "caller should ensure that they don't receive an invalid value", "datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() #", "import local import time as _time try: import pytz except", "try: # for pytz timezones return timezone.zone except AttributeError: #", "If it is ``None``, Django enables the default time zone.", "def is_aware(value): \"\"\" Determines if a given datetime.datetime is aware.", "from threading import local import time as _time try: import", "Determines if a given datetime.datetime is aware. 
The logic is", "'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC and local time", "not None def is_naive(value): \"\"\" Determines if a given datetime.datetime", "not None: _active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\"", "and local time zones ZERO = timedelta(0) class UTC(tzinfo): \"\"\"", "uses pytz when it's available and fallbacks when it isn't.", "of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active =", "a datetime and converts it to local time if necessary.", "Returns the name of ``timezone``. \"\"\" try: # for pytz", "self.timezone is None: deactivate() else: activate(self.timezone) def __exit__(self, exc_type, exc_value,", "converted (or not), overriding the value of settings.USE_TZ. This function", "get_default_timezone(): \"\"\" Returns the default time zone as a tzinfo", "function is designed for use by the template engine. \"\"\"", "given datetime.datetime is naive. The logic is described in Python's", "timezone elif isinstance(timezone, basestring) and pytz is not None: _active.value", "currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\"", "pytz else UTC() \"\"\"UTC time zone as a tzinfo instance.\"\"\"", "be converted (or not), overriding the value of settings.USE_TZ. This", "time zone as a tzinfo instance. This is the time", "\"\"\" Determines if a given datetime.datetime is naive. The logic", "tt.tm_isdst > 0 utc = pytz.utc if pytz else UTC()", "traceback): if self.old_timezone is not None: _active.value = self.old_timezone else:", "for pytz time zones value = timezone.normalize(value) return value def", "isn't available, and most likely inaccurate. If you're having trouble", "moved in __init__ to execute it as late as possible", "functions. This module uses pytz when it's available and fallbacks", "\"\"\" try: # for pytz timezones return timezone.zone except AttributeError:", "_time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour,", "return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year, dt.month, dt.day,", "get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name of the default", "time zone for the current thread. The ``timezone`` argument must", "__enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone) def __exit__(self,", "ensure that they don't receive an invalid value like None.", "\"\"\"Timezone helper functions. This module uses pytz when it's available", "Python's docs. Used only when pytz isn't available. \"\"\" def", "zone name. If it is a time zone name, pytz", "time zone is specified. \"\"\" if timezone is None: timezone", "# for regular tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now)", "shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return", "make_naive(value, timezone): \"\"\" Makes an aware datetime.datetime naive in a", "code is moved in __init__ to execute it as late", "a ``tzinfo`` subclass, a time zone name, or ``None``. 
If", "UTC() \"\"\"UTC time zone as a tzinfo instance.\"\"\" # In", "time is defined by the current time zone, unless another", "dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) stamp = _time.mktime(tt)", "_time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF =", "return _localtime # This function exists for consistency with get_current_timezone_name", "zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name", "when it's available and fallbacks when it isn't. \"\"\" from", "_localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime #", "to local time if necessary. If use_tz is provided and", "basestring) and pytz is not None: _active.value = pytz.timezone(timezone) else:", "is not None, that will force the value to be", "time zones ZERO = timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation", "This is the time zone defined by settings.TIME_ZONE. See also", "# By design, these four functions don't perform any checks", "for the current thread. Django will then use the time", "is not None def is_naive(value): \"\"\" Determines if a given", "Returns an aware or naive datetime.datetime, depending on settings.USE_TZ. \"\"\"", "del _active.value # Templates def template_localtime(value, use_tz=None): \"\"\" Checks if", "time, # wrap the expression in a function and cache", "thread. This is a context manager that uses ``~django.utils.timezone.activate()`` to", "class LocalTimezone(tzinfo): \"\"\" Local time implementation taken from Python's docs.", "it to local time if necessary. If use_tz is provided", "ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt", "isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE)", "else: del _active.value # Templates def template_localtime(value, use_tz=None): \"\"\" Checks", "will force the value to be converted (or not), overriding", "LocalTimezone(tzinfo): \"\"\" Local time implementation taken from Python's docs. Used", "is None def make_aware(value, timezone): \"\"\" Makes a naive datetime.datetime", "hasattr(_active, \"value\"): del _active.value class override(object): \"\"\" Temporarily set the", "django.conf import settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate',", "AttributeError: # for regular tzinfo objects local_now = datetime.now(timezone) return", "for the current thread. The ``timezone`` argument must be an", "it as late as possible # See get_default_timezone(). self.STDOFFSET =", "invalid value like None. def is_aware(value): \"\"\" Determines if a", "None def is_naive(value): \"\"\" Determines if a given datetime.datetime is", "datetime) and (settings.USE_TZ if use_tz is None else use_tz) and", "local time zones ZERO = timedelta(0) class UTC(tzinfo): \"\"\" UTC", "zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available", "zones value = timezone.normalize(value) return value def now(): \"\"\" Returns", "Sets the time zone for the current thread. The ``timezone``", "return value def now(): \"\"\" Returns an aware or naive", "settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override',", "naive datetime.datetime, depending on settings.USE_TZ. 
\"\"\" if settings.USE_TZ: # timeit", "None from django.conf import settings __all__ = [ 'utc', 'get_default_timezone',", "execute it as late as possible # See get_default_timezone(). self.STDOFFSET", "return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt):", "else: return datetime.now() # By design, these four functions don't", "install pytz. \"\"\" def __init__(self): # This code is moved", "def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local time", "Determines if a given datetime.datetime is naive. The logic is", "utcoffset(self, dt): return ZERO def tzname(self, dt): return \"UTC\" def", "def deactivate(): \"\"\" Unsets the time zone for the current", "local time if necessary. If use_tz is provided and is", "or ``None``. If is it a time zone name, pytz", "return timezone.zone except AttributeError: # for regular tzinfo objects local_now", "is specified. \"\"\" if timezone is None: timezone = get_current_timezone()", "enables the default time zone. \"\"\" def __init__(self, timezone): self.timezone", "available for pytz time zones value = timezone.normalize(value) return value.replace(tzinfo=None)", "settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime is", "import pytz except ImportError: pytz = None from django.conf import", "def now(): \"\"\" Returns an aware or naive datetime.datetime, depending", "is moved in __init__ to execute it as late as", "dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(),", "# This code is moved in __init__ to execute it", "localtime(value) if should_convert else value # Utilities def localtime(value, timezone=None):", "localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime to local time.", "logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo", "def __init__(self): # This code is moved in __init__ to", "the time zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\"", "a tzinfo subclass or a time zone name. If it", "time zone as a tzinfo instance.\"\"\" # In order to", "time zone aware. \"\"\" if hasattr(timezone, 'localize'): # available for", "value to be converted (or not), overriding the value of", "__init__(self, timezone): self.timezone = timezone self.old_timezone = getattr(_active, 'value', None)", "must be an instance of a tzinfo subclass or a", "= self.old_timezone else: del _active.value # Templates def template_localtime(value, use_tz=None):", "time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone():", "it isn't. \"\"\" from datetime import datetime, timedelta, tzinfo from", "value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\" Makes a naive", "Local time implementation taken from Python's docs. Used only when", "if self.timezone is None: deactivate() else: activate(self.timezone) def __exit__(self, exc_type,", "the current thread. This is a context manager that uses", "(settings.USE_TZ if use_tz is None else use_tz) and not is_naive(value)", "exit. The ``timezone`` argument must be an instance of a", "\"\"\" Sets the time zone for the current thread. The", "isn't available. \"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self, dt):", "``None``, Django enables the default time zone. 
\"\"\" def __init__(self,", "return self.DSTDIFF else: return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)]", "def localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime to local", "time. Local time is defined by the current time zone,", "a given datetime.datetime is aware. The logic is described in", "an aware datetime.datetime naive in a given time zone. \"\"\"", "Makes an aware datetime.datetime naive in a given time zone.", "from Python's docs. Used only when pytz isn't available, and", "True)) return localtime(value) if should_convert else value # Utilities def", "class UTC(tzinfo): \"\"\" UTC implementation taken from Python's docs. Used", "# may be wrong around DST changes return value.replace(tzinfo=timezone) def", "timezone.zone except AttributeError: # for regular tzinfo objects local_now =", "make_aware(value, timezone): \"\"\" Makes a naive datetime.datetime in a given", "ZERO = timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation taken from", "self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\"", "None def get_default_timezone(): \"\"\" Returns the default time zone as", "that will force the value to be converted (or not),", "available for pytz time zones return timezone.localize(value, is_dst=None) else: #", "getattr(_active, 'value', None) def __enter__(self): if self.timezone is None: deactivate()", "dt): return \"UTC\" def dst(self, dt): return ZERO class LocalTimezone(tzinfo):", "is required. If it is ``None``, Django enables the default", "is ``None``, Django enables the default time zone. \"\"\" def", "wrong around DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\"", "also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime is None: if", "timezones return timezone.zone except AttributeError: # for regular tzinfo objects", "the time zone for the current thread. The ``timezone`` argument", "tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0,", "it isn't thread safe. def activate(timezone): \"\"\" Sets the time", "``None``. If is it a time zone name, pytz is", "'value', None) def __enter__(self): if self.timezone is None: deactivate() else:", "_localtime # This function exists for consistency with get_current_timezone_name def", "of ``timezone``. \"\"\" try: # for pytz timezones return timezone.zone", "the value to be converted (or not), overriding the value", "template_localtime(value, use_tz=None): \"\"\" Checks if value is a datetime and", "time, just install pytz. \"\"\" def __init__(self): # This code", "as a tzinfo instance. This is the time zone defined", "if should_convert else value # Utilities def localtime(value, timezone=None): \"\"\"", "a time zone name, pytz is required. \"\"\" if isinstance(timezone,", "necessary. If use_tz is provided and is not None, that", "unless another time zone is specified. \"\"\" if timezone is", "timezone.normalize(value) return value def now(): \"\"\" Returns an aware or", "given datetime.datetime is aware. The logic is described in Python's", "_localtime is None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not", "return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these four", "pytz except ImportError: pytz = None from django.conf import settings", "is_dst=None) else: # may be wrong around DST changes return", "to local time. Local time is defined by the current", "zone as a tzinfo instance. 
This is the time zone", "Checks if value is a datetime and converts it to", "function and cache the result. _localtime = None def get_default_timezone():", "naive datetime.datetime in a given time zone aware. \"\"\" if", "like None. def is_aware(value): \"\"\" Determines if a given datetime.datetime", "UTC implementation taken from Python's docs. Used only when pytz", "it's available and fallbacks when it isn't. \"\"\" from datetime", "activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone is not", "LocalTimezone() return _localtime # This function exists for consistency with", "get_current_timezone_name(): \"\"\" Returns the name of the currently active time", "def tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt =", "self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if", "override(object): \"\"\" Temporarily set the time zone for the current", "None: _active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\" %", "designed for use by the template engine. \"\"\" should_convert =", "timezone = get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): #", "the currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone):", "'make_aware', 'make_naive', ] # UTC and local time zones ZERO", "of a tzinfo subclass or a time zone name. If", "if isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone, basestring) and", "set the time zone for the current thread. This is", "tzinfo subclass or a time zone name. If it is", "# See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET =", "> 0 utc = pytz.utc if pytz else UTC() \"\"\"UTC", "ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation taken from Python's", "0 utc = pytz.utc if pytz else UTC() \"\"\"UTC time", "None: timezone = get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'):", "tzinfo instance. This is the time zone defined by settings.TIME_ZONE.", "self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self,", "\"\"\" Returns the name of the default time zone. \"\"\"", "# These functions don't change os.environ['TZ'] and call time.tzset() #", "for use by the template engine. \"\"\" should_convert = (isinstance(value,", "[ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware',", "These functions don't change os.environ['TZ'] and call time.tzset() # because", "self.old_timezone else: del _active.value # Templates def template_localtime(value, use_tz=None): \"\"\"", "\"UTC\" def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local", "None else use_tz) and not is_naive(value) and getattr(value, 'convert_to_local_time', True))", "zone is specified. \"\"\" if timezone is None: timezone =", "implementation taken from Python's docs. Used only when pytz isn't", "by the template engine. \"\"\" should_convert = (isinstance(value, datetime) and", "def get_default_timezone(): \"\"\" Returns the default time zone as a", "pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime # This function", "# Timezone selection functions. 
# These functions don't change os.environ['TZ']", "If is it a time zone name, pytz is required.", "timeit shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else:", "your time, just install pytz. \"\"\" def __init__(self): # This", "_active.value = timezone elif isinstance(timezone, basestring) and pytz is not", "# because it isn't thread safe. def activate(timezone): \"\"\" Sets", "timezone on exit. The ``timezone`` argument must be an instance", "timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation taken from Python's docs.", "# This function exists for consistency with get_current_timezone_name def get_default_timezone_name():", "on their arguments. # The caller should ensure that they", "just install pytz. \"\"\" def __init__(self): # This code is", "stamp = _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst > 0", "trouble with this class, don't waste your time, just install", "the result. _localtime = None def get_default_timezone(): \"\"\" Returns the", "\"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the", "argument must be an instance of a tzinfo subclass or", "a tzinfo instance. This is the time zone defined by", "None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\" Makes", "= _time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc if", "\"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): \"\"\" Returns", "Timezone selection functions. # These functions don't change os.environ['TZ'] and", "self.old_timezone is not None: _active.value = self.old_timezone else: del _active.value", "in a given time zone. \"\"\" value = value.astimezone(timezone) if", "zone as a tzinfo instance. \"\"\" return getattr(_active, \"value\", get_default_timezone())", "to avoid accessing the settings at compile time, # wrap", "if hasattr(timezone, 'localize'): # available for pytz time zones return", "tzinfo instance.\"\"\" # In order to avoid accessing the settings", "\"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name of the", "if isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime =", "cache the result. _localtime = None def get_default_timezone(): \"\"\" Returns", "timezone): \"\"\" Makes an aware datetime.datetime naive in a given", "self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt): if", "is not None: _active.value = self.old_timezone else: del _active.value #", "is designed for use by the template engine. \"\"\" should_convert", "= pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\" % timezone) def", "having trouble with this class, don't waste your time, just", "_time try: import pytz except ImportError: pytz = None from", "it is ``None``, Django enables the default time zone. \"\"\"", "time if necessary. 
If use_tz is provided and is not", "compile time, # wrap the expression in a function and", "timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET", "dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year, dt.month,", "set the timezone on entry, and restores the previously active", "an instance of a tzinfo subclass or a time zone", "tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt):", "deactivate(): \"\"\" Unsets the time zone for the current thread.", "a naive datetime.datetime in a given time zone aware. \"\"\"", "class, don't waste your time, just install pytz. \"\"\" def", "Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value is a", "use by the template engine. \"\"\" should_convert = (isinstance(value, datetime)", "and not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return localtime(value) if", "if pytz else UTC() \"\"\"UTC time zone as a tzinfo", "time zone for the current thread. Django will then use", "don't change os.environ['TZ'] and call time.tzset() # because it isn't", "import settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate',", "module uses pytz when it's available and fallbacks when it", "is a time zone name, pytz is required. \"\"\" if", "and fallbacks when it isn't. \"\"\" from datetime import datetime,", "= timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET -", "time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the", "value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz", "else: return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self,", "is it a time zone name, pytz is required. If", "docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None or value.tzinfo.utcoffset(value) is", "call time.tzset() # because it isn't thread safe. def activate(timezone):", "__init__(self): # This code is moved in __init__ to execute", "only when pytz isn't available. \"\"\" def __repr__(self): return \"<UTC>\"", "dt): return ZERO def tzname(self, dt): return \"UTC\" def dst(self,", "time zone name, pytz is required. \"\"\" if isinstance(timezone, tzinfo):", "instance. This is the time zone defined by settings.TIME_ZONE. See", "% timezone) def deactivate(): \"\"\" Unsets the time zone for", "\"\"\" Returns the name of the currently active time zone.", "dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation", "0) stamp = _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst >", "time zones return timezone.localize(value, is_dst=None) else: # may be wrong", "utc = pytz.utc if pytz else UTC() \"\"\"UTC time zone", "a given time zone aware. \"\"\" if hasattr(timezone, 'localize'): #", "functions don't perform any checks on their arguments. # The", "\"\"\" return value.tzinfo is None or value.tzinfo.utcoffset(value) is None def", "\"\"\" Makes an aware datetime.datetime naive in a given time", "in a given time zone aware. \"\"\" if hasattr(timezone, 'localize'):", "datetime.datetime naive in a given time zone. \"\"\" value =", "at compile time, # wrap the expression in a function", "then use the time zone defined by settings.TIME_ZONE. 
\"\"\" if", "the settings at compile time, # wrap the expression in", "This function exists for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\"", "isinstance(timezone, basestring) and pytz is not None: _active.value = pytz.timezone(timezone)", "_active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\" % timezone)", "def is_naive(value): \"\"\" Determines if a given datetime.datetime is naive.", "\"\"\" from datetime import datetime, timedelta, tzinfo from threading import", "from Python's docs. Used only when pytz isn't available. \"\"\"", "global _localtime if _localtime is None: if isinstance(settings.TIME_ZONE, basestring) and", "zone. \"\"\" def __init__(self, timezone): self.timezone = timezone self.old_timezone =", "def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone is not None:", "_get_timezone_name(timezone): \"\"\" Returns the name of ``timezone``. \"\"\" try: #", "force the value to be converted (or not), overriding the", "restores the previously active timezone on exit. The ``timezone`` argument", "This function is designed for use by the template engine.", "use_tz) and not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return localtime(value)", "Utilities def localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime to", "settings.USE_TZ. \"\"\" if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is", "around DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes", "'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ]", "for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name", "timezone: %r\" % timezone) def deactivate(): \"\"\" Unsets the time", "the default time zone. \"\"\" def __init__(self, timezone): self.timezone =", "docs. Used only when pytz isn't available. \"\"\" def __repr__(self):", "hasattr(timezone, 'normalize'): # available for pytz time zones value =", "name, pytz is required. \"\"\" if isinstance(timezone, tzinfo): _active.value =", "likely inaccurate. If you're having trouble with this class, don't", "None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime", "with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name of the", "docs. Used only when pytz isn't available, and most likely", "# In order to avoid accessing the settings at compile", "\"\"\" Returns the default time zone as a tzinfo instance.", "# available for pytz time zones return timezone.localize(value, is_dst=None) else:", "def _isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute,", "when pytz isn't available, and most likely inaccurate. If you're", "isn't thread safe. 
def activate(timezone): \"\"\" Sets the time zone", "for regular tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now) #", "Used only when pytz isn't available, and most likely inaccurate.", "``timezone`` argument must be an instance of a tzinfo subclass", "timezone): self.timezone = timezone self.old_timezone = getattr(_active, 'value', None) def", "return datetime.now() # By design, these four functions don't perform", "_isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,", "'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive',", "if a given datetime.datetime is aware. The logic is described", "class override(object): \"\"\" Temporarily set the time zone for the", "dt.minute, dt.second, dt.weekday(), 0, 0) stamp = _time.mktime(tt) tt =", "Temporarily set the time zone for the current thread. This", "entry, and restores the previously active timezone on exit. The", "and cache the result. _localtime = None def get_default_timezone(): \"\"\"", "timezone on entry, and restores the previously active timezone on", "changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an aware", "\"\"\"UTC time zone as a tzinfo instance.\"\"\" # In order", "the template engine. \"\"\" should_convert = (isinstance(value, datetime) and (settings.USE_TZ", "available. \"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self, dt): return", "and value.tzinfo.utcoffset(value) is not None def is_naive(value): \"\"\" Determines if", "\"\"\" Checks if value is a datetime and converts it", "if hasattr(timezone, 'normalize'): # available for pytz time zones value", "pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime =", "if a given datetime.datetime is naive. The logic is described", "def make_naive(value, timezone): \"\"\" Makes an aware datetime.datetime naive in", "self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt): if self._isdst(dt): return", "as a tzinfo instance.\"\"\" # In order to avoid accessing", "return localtime(value) if should_convert else value # Utilities def localtime(value,", "This is a context manager that uses ``~django.utils.timezone.activate()`` to set", "\"\"\" def __init__(self, timezone): self.timezone = timezone self.old_timezone = getattr(_active,", "If use_tz is provided and is not None, that will", "is required. \"\"\" if isinstance(timezone, tzinfo): _active.value = timezone elif", "'make_naive', ] # UTC and local time zones ZERO =", "Returns the default time zone as a tzinfo instance. This", "``timezone`` argument must be an instance of a ``tzinfo`` subclass,", "# available for pytz time zones value = timezone.normalize(value) return", "else: # may be wrong around DST changes return value.replace(tzinfo=timezone)", "be an instance of a ``tzinfo`` subclass, a time zone", "isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone, basestring) and pytz", "be wrong around DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone):", "tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection", "an instance of a ``tzinfo`` subclass, a time zone name,", "from django.conf import settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone',", "late as possible # See get_default_timezone(). 
self.STDOFFSET = timedelta(seconds=-_time.timezone) if", "most likely inaccurate. If you're having trouble with this class,", "= datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. # These", "def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET", "= timezone self.old_timezone = getattr(_active, 'value', None) def __enter__(self): if", "timezone) def deactivate(): \"\"\" Unsets the time zone for the", "timezone is None: timezone = get_current_timezone() value = value.astimezone(timezone) if", "and (settings.USE_TZ if use_tz is None else use_tz) and not", "use_tz is provided and is not None, that will force", "a tzinfo instance.\"\"\" # In order to avoid accessing the", "datetime.datetime to local time. Local time is defined by the", "if self.old_timezone is not None: _active.value = self.old_timezone else: del", "template engine. \"\"\" should_convert = (isinstance(value, datetime) and (settings.USE_TZ if", "datetime.now() # By design, these four functions don't perform any", "is defined by the current time zone, unless another time", "datetime and converts it to local time if necessary. If", "datetime import datetime, timedelta, tzinfo from threading import local import", "pytz isn't available, and most likely inaccurate. If you're having", "0, 0) stamp = _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst", "tzname(self, dt): return \"UTC\" def dst(self, dt): return ZERO class", "UTC(tzinfo): \"\"\" UTC implementation taken from Python's docs. Used only", "defined by the current time zone, unless another time zone", "(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) stamp", "In order to avoid accessing the settings at compile time,", "'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC", "this class, don't waste your time, just install pytz. \"\"\"", "# for pytz timezones return timezone.zone except AttributeError: # for", "\"\"\" Returns an aware or naive datetime.datetime, depending on settings.USE_TZ.", "is a datetime and converts it to local time if", "the value of settings.USE_TZ. This function is designed for use", "value like None. def is_aware(value): \"\"\" Determines if a given", "The ``timezone`` argument must be an instance of a ``tzinfo``", "the time zone for the current thread. Django will then", "time zone, unless another time zone is specified. \"\"\" if", "\"\"\" Unsets the time zone for the current thread. Django", "or naive datetime.datetime, depending on settings.USE_TZ. \"\"\" if settings.USE_TZ: #", "getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert else value #", "if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt):", "DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an", "the default time zone as a tzinfo instance. This is", "a given time zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone,", "This code is moved in __init__ to execute it as", "now(): \"\"\" Returns an aware or naive datetime.datetime, depending on", "docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None and value.tzinfo.utcoffset(value)", "name. 
If it is a time zone name, pytz is", "Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None or value.tzinfo.utcoffset(value)", "Local time is defined by the current time zone, unless", "name of ``timezone``. \"\"\" try: # for pytz timezones return", "if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24% slower", "(or not), overriding the value of settings.USE_TZ. This function is", "ImportError: pytz = None from django.conf import settings __all__ =", "raise ValueError(\"Invalid timezone: %r\" % timezone) def deactivate(): \"\"\" Unsets", "wrap the expression in a function and cache the result.", "because it isn't thread safe. def activate(timezone): \"\"\" Sets the", "local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. #", "timezone self.old_timezone = getattr(_active, 'value', None) def __enter__(self): if self.timezone", "a time zone name, pytz is required. If it is", "should_convert else value # Utilities def localtime(value, timezone=None): \"\"\" Converts", "_localtime if _localtime is None: if isinstance(settings.TIME_ZONE, basestring) and pytz", "= get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available", "http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None and value.tzinfo.utcoffset(value) is", "get_default_timezone_name(): \"\"\" Returns the name of the default time zone.", "def get_default_timezone_name(): \"\"\" Returns the name of the default time", "design, these four functions don't perform any checks on their", "the name of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone())", "pytz is required. \"\"\" if isinstance(timezone, tzinfo): _active.value = timezone", "datetime.datetime is naive. The logic is described in Python's docs:", "timezone=None): \"\"\" Converts an aware datetime.datetime to local time. Local", "basestring) and pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else:", "settings at compile time, # wrap the expression in a", "_time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc if pytz", "= self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self):", "``tzinfo`` subclass, a time zone name, or ``None``. If is" ]
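A minimal usage sketch for the helpers above, assuming the module is installed as django.utils.timezone in a project with a configured settings module, USE_TZ = True, and pytz available; the 'Europe/Madrid' zone name is only an illustrative choice, not something taken from the original source.

# Sketch: exercising now()/activate()/localtime()/override() under the stated assumptions.
from django.utils import timezone

aware_now = timezone.now()                 # aware datetime because USE_TZ is True
timezone.activate('Europe/Madrid')         # zone name, so pytz is required
local_dt = timezone.localtime(aware_now)   # converted to the active time zone

with timezone.override('UTC'):             # temporarily switch the active zone
    assert timezone.get_current_timezone_name() == 'UTC'

timezone.deactivate()                      # fall back to settings.TIME_ZONE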
[ "resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch',", "resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process'", "version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) ==", "'/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch', 'thredds_download',", "import client_for from malleefowl.processes import processes def test_wps_caps(): client =", "= resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download',", "from pywps.tests import assert_response_success from .common import client_for from malleefowl.processes", "from .common import client_for from malleefowl.processes import processes def test_wps_caps():", "pywps import Service from pywps.tests import assert_response_success from .common import", "processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities',", "import assert_response_success from .common import client_for from malleefowl.processes import processes", "import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps',", "client_for from malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes))", "'/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch', 'thredds_download', 'workflow'", "client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert", "names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [", "client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names =", "import Service from pywps.tests import assert_response_success from .common import client_for", "= client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier')", "from pywps import Service from pywps.tests import assert_response_success from .common", "request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split())", "assert_response_success from .common import client_for from malleefowl.processes import processes def", ".common import client_for from malleefowl.processes import processes def test_wps_caps(): client", "pywps.tests import assert_response_success from .common import client_for from malleefowl.processes import", "def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0')", "from malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp", "import pytest from pywps import Service from pywps.tests import assert_response_success", "malleefowl.processes import processes def test_wps_caps(): client = 
client_for(Service(processes=processes)) resp =", "Service from pywps.tests import assert_response_success from .common import client_for from", "client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings'", "<gh_stars>0 import pytest from pywps import Service from pywps.tests import", "pytest from pywps import Service from pywps.tests import assert_response_success from", "test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names", "'/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch', 'thredds_download', 'workflow' ]", "= client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities'" ]
[ ":: Python :: 3.7\", \"Programming Language :: Python :: Implementation", "author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[", "Libraries :: Python Modules\", \"Environment :: Console\", \"Environment :: Web", "packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ],", "license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming Language", "Audience :: Developers\", \"Intended Audience :: Education\", \"Development Status ::", "= \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\")", "Windows\", \"Operating System :: POSIX\", \"Operating System :: Unix\" ]", "Development :: Libraries :: Python Modules\", \"Environment :: Console\", \"Environment", "\"Programming Language :: Python :: 3.7\", \"Programming Language :: Python", "url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming", "Language :: Python :: 3.6\", \"Programming Language :: Python ::", "Python :: 3.4\", \"Programming Language :: Python :: 3.5\", \"Programming", "Implementation :: CPython\", \"Programming Language :: Python :: Implementation ::", ":: Python :: 3.5\", \"Programming Language :: Python :: 3.6\",", ":: Implementation :: PyPy\", \"Topic :: Software Development :: Libraries", "\"Intended Audience :: Education\", \"Development Status :: 5 - Production/Stable\",", "Education\", \"Development Status :: 5 - Production/Stable\", \"License :: OSI", "\"Topic :: Software Development :: Libraries :: Python Modules\", \"Environment", "\"Operating System :: Microsoft :: Windows\", \"Operating System :: POSIX\",", "Language :: Python :: 2.7\", \"Programming Language :: Python ::", "- Production/Stable\", \"License :: OSI Approved :: MIT License\", \"Operating", "<NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description", "with open(\"README.md\", \"r\") as f: long_description = f.read() setup( name=\"pyngrok\",", "\"License :: OSI Approved :: MIT License\", \"Operating System ::", "\"Programming Language :: Python :: 3.6\", \"Programming Language :: Python", "description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\",", ":: Console\", \"Environment :: Web Environment\", \"Intended Audience :: Developers\",", "Software Development :: Libraries :: Python Modules\", \"Environment :: Console\",", "PyPy\", \"Topic :: Software Development :: Libraries :: Python Modules\",", "Language :: Python :: 3.4\", \"Programming Language :: Python ::", "version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\"", ":: 5 - Production/Stable\", \"License :: OSI Approved :: MIT", "Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", 
download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\",", "\"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\",", ":: 3.7\", \"Programming Language :: Python :: Implementation :: CPython\",", "Status :: 5 - Production/Stable\", \"License :: OSI Approved ::", "long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\",", "Implementation :: PyPy\", \"Topic :: Software Development :: Libraries ::", "setuptools import setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019,", ":: PyPy\", \"Topic :: Software Development :: Libraries :: Python", "__copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\",", "\"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python", "Language :: Python :: Implementation :: PyPy\", \"Topic :: Software", ":: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language", "!=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run", "2.7\", \"Programming Language :: Python :: 3.4\", \"Programming Language ::", "Python :: Implementation :: CPython\", \"Programming Language :: Python ::", "Python :: 2.7\", \"Programming Language :: Python :: 3.4\", \"Programming", ":: Windows\", \"Operating System :: POSIX\", \"Operating System :: Unix\"", "[console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\",", "\"Programming Language :: Python :: 3.4\", \"Programming Language :: Python", "Language :: Python :: 3.7\", \"Programming Language :: Python ::", "author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\",", ":: Implementation :: CPython\", \"Programming Language :: Python :: Implementation", "= \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description = f.read()", "download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language", "\"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python ::", ":: Python Modules\", \"Environment :: Console\", \"Environment :: Web Environment\",", "System :: Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating", ":: Libraries :: Python Modules\", \"Environment :: Console\", \"Environment ::", ":: 2.7\", \"Programming Language :: Python :: 3.4\", \"Programming Language", "install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python", "Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming", "\"Environment :: Console\", \"Environment :: Web Environment\", \"Intended Audience ::", "2019, <NAME>\" 
__version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f:", "\"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming Language ::", "5 - Production/Stable\", \"License :: OSI Approved :: MIT License\",", "!=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A", "as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7,", "\"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as", ":: Python :: 2.7\", \"Programming Language :: Python :: 3.4\",", "\"Programming Language :: Python :: Implementation :: PyPy\", \"Topic ::", ":: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language", ":: Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience ::", "open(\"README.md\", \"r\") as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__,", "OSI Approved :: MIT License\", \"Operating System :: MacOS\", \"Operating", "\"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"Development Status", "__version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description =", "\"Environment :: Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience", ":: Python :: 3.6\", \"Programming Language :: Python :: 3.7\",", "\"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for", "Python Modules\", \"Environment :: Console\", \"Environment :: Web Environment\", \"Intended", "long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*,", "CPython\", \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic", "\"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with", "Language :: Python :: 3.5\", \"Programming Language :: Python ::", ":: CPython\", \"Programming Language :: Python :: Implementation :: PyPy\",", "Python :: Implementation :: PyPy\", \"Topic :: Software Development ::", "f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\",", "\"Programming Language :: Python :: Implementation :: CPython\", \"Programming Language", "\"Operating System :: POSIX\", \"Operating System :: Unix\" ] )", "python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\"", "\"Programming Language :: Python :: 3.5\", \"Programming Language :: Python", "Approved :: MIT License\", \"Operating System :: MacOS\", \"Operating System", "Language :: Python :: Implementation :: CPython\", \"Programming Language ::", "Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\",", "Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming", "System :: MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating", "Console\", \"Environment :: Web Environment\", \"Intended Audience :: Developers\", \"Intended", "setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__", "<filename>setup.py from setuptools import setup __author__ = \"<NAME>\" 
__copyright__ =", "Environment\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"Development", "= \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\"", "\"Operating System :: MacOS\", \"Operating System :: Microsoft :: Windows\",", "Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__),", "__author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ =", "\"Programming Language :: Python :: 2.7\", \"Programming Language :: Python", ":: MIT License\", \"Operating System :: MacOS\", \"Operating System ::", "keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language ::", ":: Python :: Implementation :: PyPy\", \"Topic :: Software Development", "3.7\", \"Programming Language :: Python :: Implementation :: CPython\", \"Programming", "= f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*,", "ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\",", "3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language ::", ":: Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating System", "], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\",", "import setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\"", "\"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved", "Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating System ::", "long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"],", "\"1.4.0\" with open(\"README.md\", \"r\") as f: long_description = f.read() setup(", "Python :: 3.7\", \"Programming Language :: Python :: Implementation ::", "\"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming", "f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*,", "MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating System ::", ":: OSI Approved :: MIT License\", \"Operating System :: MacOS\",", ":: Software Development :: Libraries :: Python Modules\", \"Environment ::", "!=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\",", "3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language ::", "Audience :: Education\", \"Development Status :: 5 - Production/Stable\", \"License", "wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\",", ":: Education\", 
\"Development Status :: 5 - Production/Stable\", \"License ::", ":: 3.4\", \"Programming Language :: Python :: 3.5\", \"Programming Language", "entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description,", ":: Python :: Implementation :: CPython\", \"Programming Language :: Python", "License\", \"Operating System :: MacOS\", \"Operating System :: Microsoft ::", "Developers\", \"Intended Audience :: Education\", \"Development Status :: 5 -", "\"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper", "MIT License\", \"Operating System :: MacOS\", \"Operating System :: Microsoft", "from setuptools import setup __author__ = \"<NAME>\" __copyright__ = \"Copyright", "!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts]", "setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[", ":: Developers\", \"Intended Audience :: Education\", \"Development Status :: 5", "Production/Stable\", \"License :: OSI Approved :: MIT License\", \"Operating System", "name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\",", "for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\",", ":: Python :: 3.4\", \"Programming Language :: Python :: 3.5\",", ":: MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating System", "3.4\", \"Programming Language :: Python :: 3.5\", \"Programming Language ::", "Modules\", \"Environment :: Console\", \"Environment :: Web Environment\", \"Intended Audience", "\"r\") as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"]," ]
[ "arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION),", "\"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return", "name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return", "\"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen',", "name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline", "image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that", "Pipeline input variables. KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE", "/src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION),", "KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def", "@dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' ) def", "arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks'", "= serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ ==", ") def trackml(): train = train_op() serve = serve_op() serve.after(train)", "serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__", "command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE,", "KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION", "dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply", "\"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\"", "# Pipeline input variables. 
KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\"", "description='A pipeline that predicts particle tracks' ) def trackml(): train", "if __name__ == '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml, __file__", "name='trackml', description='A pipeline that predicts particle tracks' ) def trackml():", "\"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return", "predicts particle tracks' ) def trackml(): train = train_op() serve", ").apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"],", "\"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE,", "command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle", "TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION),", "import kfp.dsl as dsl import kfp.gcp as gcp # Pipeline", "TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp(", "as dsl import kfp.gcp as gcp # Pipeline input variables.", "= train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve)", "arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret())", "serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import", "def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\",", ").apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' )", "dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op():", "that predicts particle tracks' ) def trackml(): train = train_op()", "__name__ == '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml, __file__ +", "image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\",", "= \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train',", "== '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml, __file__ + '.tar.gz')", "python3 import kfp.dsl as dsl import kfp.gcp as gcp #", "gcp # Pipeline input variables. 
#!/usr/bin/env python3

import kfp.dsl as dsl
import kfp.gcp as gcp

# Pipeline input variables.
KUBECTL_IMAGE = "gcr.io/mcas-195423/trackml_master_kfp_kubectl"
KUBECTL_IMAGE_VERSION = "1"
TRACKML_IMAGE = "gcr.io/mcas-195423/trackml_master_trackml"
TRACKML_IMAGE_VERSION = "1"


def train_op():
    return dsl.ContainerOp(
        name='train',
        image="{}:{}".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION),
        command=["python"],
        arguments=["train.py"],
    ).apply(gcp.use_gcp_secret())  # .set_gpu_limit(1)


def serve_op():
    return dsl.ContainerOp(
        name='serve',
        image="{}:{}".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION),
        arguments=[
            "/src/set_kubectl.sh",
            "--namespace", "kubeflow",
            "--command", "apply -f /src/k8s/serve.yaml",
        ],
    ).apply(gcp.use_gcp_secret())


def resultsgen_op():
    return dsl.ContainerOp(
        name='resultsgen',
        image="{}:{}".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION),
        command=["python"],
        arguments=["resultsgen.py"],
    ).apply(gcp.use_gcp_secret())


@dsl.pipeline(
    name='trackml',
    description='A pipeline that predicts particle tracks'
)
def trackml():
    train = train_op()

    serve = serve_op()
    serve.after(train)

    resultsgen = resultsgen_op()
    resultsgen.after(serve)


if __name__ == '__main__':
    import kfp.compiler as compiler
    compiler.Compiler().compile(trackml, ...)  # output package path elided
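Not part of the original pipeline file: a minimal sketch of how a pipeline definition like the one above is typically compiled and submitted with the KFP SDK. The endpoint URL, experiment name, run name, and package filename are placeholders, not values from the source.

# Hedged usage sketch: compile the `trackml` pipeline function and submit a run.
# Endpoint, experiment name, run name, and package path are hypothetical.
import kfp
import kfp.compiler as compiler

compiler.Compiler().compile(trackml, "trackml.tar.gz")
client = kfp.Client(host="http://localhost:8080")              # hypothetical KFP endpoint
experiment = client.create_experiment("trackml-experiments")   # hypothetical experiment
client.run_pipeline(experiment.id, "trackml-run", "trackml.tar.gz")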
[ "publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5", "secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "import utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq", "def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins =", "== 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if", "from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE,", "{}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network):", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ ==", "mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs,", "publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60", "True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network,", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs %", "action='store', required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args()", "= 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1)", "parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args", "required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args() network", "logging from infcommon import utils from infrabbitmq import factory as", "time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs,", "% 60 == 0: mins += 1 secs = 0", "main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0", "= args.network.split('-')[0] main(network) except Exception as exc: logging.critical(\"Ticker Fails: {}\".format(exc))", "publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__':", "TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network,", "mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n',", "import argparse import logging from infcommon import utils from infrabbitmq", "infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def", "parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as exc: logging.critical(\"Ticker", "network, data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer()", "from infcommon import utils from infrabbitmq import factory as infrabbitmq_factory", "1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs", "if secs % 60 == 0: mins += 1 secs", "parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo,", "if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, 
TICK_5_MINUTES,", "(RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES,", "publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try:", "secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): publisher", "TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try: parser", "mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names", "import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES,", "secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick':", "mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network,", "try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name", "rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs +=", "== 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if", "publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2", "publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5 ==", "if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network',", "publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60 ==", "logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins':", "mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "== 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "= infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions =", "while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND,", "60 == 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins", "TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs,", "def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {} {}\".format(event,", "+= 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if", "mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins", "= 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if", "argparse import logging from infcommon import utils from infrabbitmq import", "'__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network", "= argparse.ArgumentParser() parser.add_argument('-n', 
'--network', action='store', required=True, help='Network name (ilo, c2k,", "infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from", "mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network,", "network, secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): publisher =", "( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event,", "factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import", "KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as", "mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True:", "import time import puka import argparse import logging from infcommon", "0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__", "secs, mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser()", "0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins", "60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins)", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins %", "= (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1", "infcommon import utils from infrabbitmq import factory as infrabbitmq_factory from", "<reponame>aleasoluciones/infrabbitmq<filename>bin/ticker.py # -*- coding: utf-8 -*- import time import puka", "...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception", "-*- coding: utf-8 -*- import time import puka import argparse", "help='Network name (ilo, c2k, ...)') args = parser.parse_args() network =", "secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins)", "data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs", "== '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True,", "== 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if", "-*- import time import puka import argparse import logging from", "0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins", "as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import (", "= parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as exc:", "import puka import argparse import logging from infcommon import utils", "+= 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network,", "secs, 'mins': mins}) def main(network): publisher = 
infrabbitmq_factory.event_publisher_json_serializer() secs =", "secs, mins) if secs % 60 == 0: mins +=", "infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND,", "TICK_2_MINUTES, network, secs, mins) if mins % 5 == 0:", "5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins)", "secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "secs % 60 == 0: mins += 1 secs =", "{} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def", "puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while", "event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins})", "publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs % 60", "2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins)", "publisher, TICK_1_SECOND, network, secs, mins) if secs % 60 ==", "time import puka import argparse import logging from infcommon import", "mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network,", "network, secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "import logging from infcommon import utils from infrabbitmq import factory", "TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info(\"publish", "TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event", "infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES,", "if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES,", "TICK_1_MINUTE, network, secs, mins) if mins % 2 == 0:", "TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins):", "coding: utf-8 -*- import time import puka import argparse import", "from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, )", "1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs,", "mins) if secs % 60 == 0: mins += 1", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins %", "publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs))", "utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import", "infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError,", "0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher,", "TICK_5_MINUTES, network, secs, mins) if mins % 60 == 0:", "% 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, 
network, secs,", "mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", ") def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {}", "'--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args =", "argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)')", "from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins %", "network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network,", "puka import argparse import logging from infcommon import utils from", "name (ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0]", "__name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store',", "% 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs,", "secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError,", "(ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network)", "secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins)", "0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs", "RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES,", "publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions", "secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "= 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,)", "publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2 ==", "TICK_1_SECOND, network, secs, mins) if secs % 60 == 0:", "# -*- coding: utf-8 -*- import time import puka import", "% 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs,", "mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE,", "network = args.network.split('-')[0] main(network) except Exception as exc: logging.critical(\"Ticker Fails:", "utf-8 -*- import time import puka import argparse import logging", "event, network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event,", "c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except", "'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0", "network, secs, mins) if secs % 60 == 0: mins", "network, secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "network, secs, mins) if __name__ == '__main__': try: parser =" ]
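Not part of the original ticker script: the retry helper it relies on lives in `infcommon.utils` and is not shown here. Judging only from the calling convention above (an exceptions tuple first, then the callable and its arguments), it presumably behaves roughly like the sketch below; the retry count and delay values are illustrative assumptions, not the real implementation.

# Hedged sketch of a retry-with-exponential-backoff helper matching the call sites above.
import logging
import time


def do_stuff_with_exponential_backoff(exceptions, func, *args, max_retries=5):
    delay = 1
    for _ in range(max_retries - 1):
        try:
            return func(*args)
        except exceptions as exc:
            logging.warning("retrying %s after %s", getattr(func, "__name__", func), exc)
            time.sleep(delay)
            delay *= 2
    return func(*args)  # final attempt; let any exception propagate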
[ "a pretrained model), or - the model was saved using", "let the specific kwargs override the common ones in case", "be instantiated as a transformer architecture with one of the", "2.0 (the \"License\"); # you may not use this file", "been done) - If a configuration is not provided, ``kwargs``", "containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a", "used if you want to create a model from a", "model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either:", "the decoder will be initialized with the pretrained weight (the", "path is slower than converting the TensorFlow checkpoint in a", "We need for each to get down to the embedding", "underlying model's ``__init__`` method config: (`optional`) instance of a class", "a model from a pretrained configuration but load your own", "Load and initialize the encoder and decoder # The distinction", "configuration object (after it being loaded) and initiate the model.", "of keyword arguments. \"\"\" # keyword arguments come in 3", "value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and", "loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending", "be passed to the underlying model's ``__init__`` function. You can", "model checkpoints. The model is set in evaluation mode by", "and a decoder from one or two base classes of", "``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found", "to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of", "classes of the library from pre-trained model checkpoints. The model", "parameters that will be used to initialize a\" \" torch.nn.LSTM", "path to a pretrained model is specified the encoder and", "string with the `shortcut name` of a pre-trained model to", "if ( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path", "the configuration have already been done) - If a configuration", "encoder and decoder # The distinction between encoder and decoder", "(prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`) and those", "override the common ones in case of conflict. kwargs_common =", "import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__)", "License for the specific language governing permissions and # limitations", "do not correspond to any configuration attribute will be passed", "decoder_input_ids, **kwargs): \"\"\" The forward pass on a seq2eq depends", "raise ValueError(\"Only the Bert model is currently supported.\") model =", "initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword argument. \"", "a downloaded pre-trained model configuration should be cached if the", "encoder if an argument named `encoder_hidden_state` is passed to this", "both of the encoder and decoder are of the same", "False: Force to (re-)download the model weights and configuration files", "will be initialized with the pretrained weight (the cross-attention will", "Configuration for the model to use instead of an automatically", "decoder's parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory,", "necessary to initiate the decoder. 
Either: - a string with", "converting the TensorFlow checkpoint in a PyTorch model using the", "sepcific for the encoder and decoder by prefixing the key", "def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder", "Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None )", "possible to override this behavior and initialize, say, the decoder", "files and override the cached versions if they exists. proxies:", "`is_decoder` that we need to set correctly. encoder = kwargs_encoder.pop(\"model\",", "first set it back in training mode with `model.train()` Params:", "``decoder_output_attention=True``). The remaining kwargs will be passed to both encoders", "embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte", "encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False", "the `shortcut name` of a pre-trained model to load from", "dictionary of configuration parameters that will be used to initialize", "} ) # Load and initialize the encoder and decoder", "**kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args,", "standard cache should not be used. force_download: (`optional`) boolean, default", "with the pretrained weight (the cross-attention will be intialized randomly", "initialize, say, the decoder randomly by creating it beforehand as", "is not provided, ``kwargs`` will be first passed to the", "assume all relevant updates to the configuration have already been", "class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that", "to also return a dictionnary containing missing keys, unexpected keys", "keyword arguments come in 3 flavors: encoder-specific (prefixed by #", "'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments come in", "OF ANY KIND, either express or implied. # See the", "correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is None: encoder", "See the License for the specific language governing permissions and", "will be instantiated as a transformer architecture with one of", "to in writing, software # distributed under the License is", "flag `is_decoder` that we need to set correctly. encoder =", "to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings -", "when: - the model is a model provided by the", "loaded from saved weights file. This option can be used", "endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used", "= BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args,", "arguments. Can be used to update the configuration object (after", "a decoder from one or two base classes of the", "or agreed to in writing, software # distributed under the", "suppling the save directory. 
- the model is loaded by", "kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs", "in kwargs: raise ValueError( \"To load an LSTM in Encoder-Decoder", "kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder =", "JSON file named `config.json` is found in the directory. state_dict:", "that will be used to initialize a\" \" torch.nn.LSTM model", "value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } )", "(we assume all relevant updates to the configuration have already", "def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on", "should not be used. force_download: (`optional`) boolean, default False: Force", "compliance with the License. # You may obtain a copy", "be used if you want to create a model from", "passes with the encoder's hidden state through the decoder to", "several forward passes with the encoder's hidden state through the", "the forward pass on the encoder if an argument named", "encoder and decoder by prefixing the key with `encoder_` and", "All remaning positional arguments will be passed to the underlying", "layer hidden state else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"]", "kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for", "model weights and configuration files and override the cached versions", "keyword arguments. \"\"\" # keyword arguments come in 3 flavors:", "proxies: (`optional`) dict, default None: A dictionary of proxy servers", "\" \" - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`),", "not use this file except in compliance with the License.", "pretrained model), or - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained`", "boolean: Set to ``True`` to also return a dictionnary containing", "(`optional`) boolean: Set to ``True`` to also return a dictionnary", ":]: value for argument, value in kwargs.items() if argument.startswith(\"encoder_\") }", "License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\" from __future__", "you may not use this file except in compliance with", "\" - a dictionary of configuration parameters that will be", "library as encoder and (optionally) another one as decoder when", "forward pass through the encoder, and then perform several forward", "kwargs sepcific for the encoder and decoder by prefixing the", "- If a configuration is provided with ``config``, ``**kwargs`` will", "or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise", "Inc. team. # # Licensed under the Apache License, Version", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "forward pass on a seq2eq depends what we are performing:", "- RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte -", "= kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument, value", "output the last layer hidden state else: encoder_outputs = ()", "a pretrained configuration but load your own weights. In this", "a pretrained model is specified the encoder and the decoder", "Remaining dictionary of keyword arguments. 
Can be used to update", "should be set to True and a configuration object should", "that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the", "at the model level is made # by the value", "not be used. force_download: (`optional`) boolean, default False: Force to", "when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self,", "be used. force_download: (`optional`) boolean, default False: Force to (re-)download", "either: \" \" - a torch.nn.LSTM model as `decoder_model` parameter", "`config` is provided or automatically loaded: - If a configuration", "encoder, and then perform several forward passes with the encoder's", "or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model", "a transformer architecture with one of the base model classes", "for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) #", "as a transformer architecture with one of the base model", "the encoder and decoder are of the same family. If", "# Copyright 2018 The HuggingFace Inc. team. # # Licensed", "encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod", "model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling", "by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The", "model using the provided conversion scripts and loading the PyTorch", "decoder by prefixing the key with `encoder_` and `decoder_` respectively.", "can specify kwargs sepcific for the encoder and decoder by", "): r\"\"\" Instantiates an encoder and a decoder from one", "in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and initialize", "encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input", "XEmbedding layer for each model, but it is \"blocked\" by", "that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel:", "embeddings together. We need for each to get down to", "bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer for", "support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division, print_function,", "BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2:", "as `decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size':", "by suppling a local directory as ``pretrained_model_name_or_path`` and a configuration", "Set to ``True`` to also return a dictionnary containing missing", "the provided conversion scripts and loading the PyTorch model afterwards.", "a Seq2Seq2 model where both of the encoder and decoder", "encoder_outputs[ 0 ] # output the last layer hidden state", "# coding=utf-8 # Copyright 2018 The HuggingFace Inc. team. #", "of keyword arguments. Can be used to update the configuration", "sequence tokens in the vocabulary. 
kwargs: (`optional`) Remaining dictionary of", "In this case, ``from_tf`` should be set to True and", "encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder", "automatically loaded when: - the model is a model provided", "__init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder =", "model, but it is \"blocked\" by a model-specific keyword (bert,", "the encoder and decoder; - During prediction, we perform one", "a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case,", "# Load and initialize the encoder and decoder # The", "Copyright 2018 The HuggingFace Inc. team. # # Licensed under", "decoder randomly by creating it beforehand as follows config =", "configuration JSON file named `config.json` is found in the directory.", "family. If the name of or that path to a", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by", "Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" #", "state dictionnary for the model to use instead of a", "can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and", "import torch from torch import nn from .modeling_auto import AutoModel,", "to a pretrained model is specified the encoder and the", "be first passed to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`).", "model to load from cache or download, e.g.: ``bert-base-uncased``. -", "checkpoint in a PyTorch model using the provided conversion scripts", "should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a", "on whether a `config` is provided or automatically loaded: -", "pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs", "= kwargs_decoder.pop(\"model\", None) if decoder is None: decoder = AutoModelWithLMHead.from_pretrained(", "a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" -", "*args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: # We will", "{ argument: value for argument, value in kwargs.items() if not", "conversion scripts and loading the PyTorch model afterwards. model_args: (`optional`)", "a Seq2Seq model and its configuration file in a format", "a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``.", "file except in compliance with the License. # You may", "argument[len(\"decoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"decoder_\")", "the value of the flag `is_decoder` that we need to", "remaning positional arguments will be passed to the underlying model's", "override the cached versions if they exists. proxies: (`optional`) dict,", "hidden state through the decoder to decode a full sequence.", "base classes of the library from pre-trained model checkpoints. The", "automatically loaded: - If a configuration is provided with ``config``,", "the vocabulary. 
decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of", "the underlying model's ``__init__`` method (we assume all relevant updates", "is passed to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape", "a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to", "a dictionnary containing missing keys, unexpected keys and error messages.", "- RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer for each", "HuggingFace Inc. team. # # Licensed under the Apache License,", "and initialize, say, the decoder randomly by creating it beforehand", "of ``kwargs`` that corresponds to a configuration attribute will be", "configuration files and override the cached versions if they exists.", "a string with the `shortcut name` of a pre-trained model", "positional arguments will be passed to the underlying model's ``__init__``", "a format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained`", "own weights. In this case though, you should check if", "using `model.eval()` (Dropout modules are deactivated) To train the model,", "to any configuration attribute will be passed to the underlying", "supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs )", "model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path", "\" torch.nn.LSTM model as `decoder_config` keyword argument. \" \" E.g.", "return model def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model", "``True`` to also return a dictionnary containing missing keys, unexpected", "model's ``__init__`` method (we assume all relevant updates to the", "If a configuration is not provided, ``kwargs`` will be first", "and those # that apply to the model as whole.", "keys that do not correspond to any configuration attribute will", "configuration should be cached if the standard cache should not", "positional arguments: All remaning positional arguments will be passed to", "weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or", "vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\" #", "Indices of decoder input sequence tokens in the vocabulary. kwargs:", "decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self,", "KIND, either express or implied. # See the License for", "Configuration can be automatically loaded when: - the model is", "- the model is loaded by suppling a local directory", "architectures \"\"\" from __future__ import absolute_import, division, print_function, unicode_literals import", "e.g.: ``./my_model_directory/decoder``. - a path or url to a `tensorflow", "to use instead of an automatically loaded configuation. Configuration can", "by a model-specific keyword (bert, )... 
\"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder)", "self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args,", "we are performing: - During training we perform one forward", "is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args,", "decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an encoder and a", "(the \"License\"); # you may not use this file except", "created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder,", "the library from pre-trained model checkpoints. The model is set", "format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We", "provided with ``config``, ``**kwargs`` will be directly passed to the", "attribute with the supplied ``kwargs`` value. Remaining keys that do", "pretrained model is specified the encoder and the decoder will", "using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters in", "of configuration parameters that will be used to initialize a\"", "Classes to support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import,", "# # Unless required by applicable law or agreed to", "print_function, unicode_literals import logging import os import torch from torch", "This option can be used if you want to create", "(e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to both", ") decoder.config.is_decoder = True model = cls(encoder, decoder) return model", "Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division, print_function, unicode_literals", "loaded configuation. Configuration can be automatically loaded when: - the", "and loading the PyTorch model afterwards. model_args: (`optional`) Sequence of", "`./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True", "level is made # by the value of the flag", "modules are deactivated) To train the model, you need to", "r\"\"\" Instantiates an encoder and a decoder from one or", "\"blocked\" by a model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder,", "implied. # See the License for the specific language governing", "as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a dictionary of", "case, ``from_tf`` should be set to True and a configuration", "as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is", "same family. If the name of or that path to", "function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a", "`decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed", "value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if", "versions if they exists. proxies: (`optional`) dict, default None: A", "model where both of the encoder and decoder are of", "be automatically loaded when: - the model is a model", "or\" \" - a dictionary of configuration parameters that will", "attribute will be passed to the underlying model's ``__init__`` function.", "tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings together. 
We", "using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory.", "True and a configuration object should be provided as ``config``", "a simpler option. cache_dir: (`optional`) string: Path to a directory", "# keyword arguments come in 3 flavors: encoder-specific (prefixed by", "model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return", "depending on whether a `config` is provided or automatically loaded:", "``kwargs`` value. Remaining keys that do not correspond to any", "model's ``__init__`` function. You can specify kwargs sepcific for the", "will be passed to the underlying model's ``__init__`` method config:", "by `decoder_`) and those # that apply to the model", "not a simpler option. cache_dir: (`optional`) string: Path to a", "in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder", "conversion scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information", "Unless required by applicable law or agreed to in writing,", "for argument, value in kwargs.items() if not argument.startswith(\"encoder_\") and not", "value of the flag `is_decoder` that we need to set", "the name of or that path to a pretrained model", "model is a model provided by the library (loaded with", "The model is set in evaluation mode by default using", "is loaded by suppling a local directory as ``pretrained_model_name_or_path`` and", "- a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \"", "the specific language governing permissions and # limitations under the", "if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option.", "is a model provided by the library (loaded with the", "is None: # We will create a randomly initilized LSTM", "decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder, decoder)", "by the library (loaded with the ``shortcut-name`` string of a", "be passed to the underlying model's ``__init__`` method config: (`optional`)", "Encoder-Decoder model, please supply either: \" \" - a torch.nn.LSTM", "be passed to both encoders and decoders. Examples:: model =", "different model classes are inconsistent to that respect: - BertModel:", "the model to use instead of a state dictionary loaded", "cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder):", "messages. kwargs: (`optional`) Remaining dictionary of keyword arguments. Can be", "``output_attention=True``). Behave differently depending on whether a `config` is provided", "name of or that path to a pretrained model is", "in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to", "save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model and its configuration", "have already been done) - If a configuration is not", "weights. 
In this case though, you should check if using", "@classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not", ") # Encode if needed (training, first prediction pass) encoder_hidden_states", "intialized randomly if its weights are not present). It is", "model provided by the library (loaded with the ``shortcut-name`` string", "to the configuration have already been done) - If a", "decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def", "Indices of encoder input sequence tokens in the vocabulary. decoder_input_ids:", "the last layer hidden state else: encoder_outputs = () #", ":func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`)", "True model = cls(encoder, decoder) return model def save_pretrained(self, save_directory):", "unicode_literals import logging import os import torch from torch import", "``kwargs`` will be first passed to the configuration class initialization", "\"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The", "model def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model and", "= kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value", "should be cached if the standard cache should not be", "@classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\"", "``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in the", "configuration but load your own weights. In this case though,", "the base model classes of the library as encoder and", "``from_tf`` should be set to True and a configuration object", "encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs =", "if decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder )", "(`optional`) dict: an optional state dictionnary for the model to", "(after it being loaded) and initiate the model. (e.g. ``output_attention=True``).", "BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer", ") kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs) return", "absolute_import, division, print_function, unicode_literals import logging import os import torch", "done) - If a configuration is not provided, ``kwargs`` will", "pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ):", "} ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value", "as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\"", "of a state dictionary loaded from saved weights file. 
This", "by the value of the flag `is_decoder` that we need", "randomly initilized LSTM model as decoder if \"decoder_config\" not in", "instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the", "\"\"\" Classes to support Encoder-Decoder architectures \"\"\" from __future__ import", "be used to initialize a\" \" torch.nn.LSTM model as `decoder_config`", "of the encoder and decoder are of the same family.", "weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or", "logging import os import torch from torch import nn from", "an optional state dictionnary for the model to use instead", "index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should", ") decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class", "`config.json` is found in the directory. state_dict: (`optional`) dict: an", "loaded: - If a configuration is provided with ``config``, ``**kwargs``", "embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings", "You may obtain a copy of the License at #", "shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in", "+ encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model", "the encoder and decoder # The distinction between encoder and", "information necessary to initiate the decoder. Either: - a string", "Sequence of positional arguments: All remaning positional arguments will be", "in the directory. state_dict: (`optional`) dict: an optional state dictionnary", "supplied ``kwargs`` value. Remaining keys that do not correspond to", "model level is made # by the value of the", "of conflict. kwargs_common = { argument: value for argument, value", "`encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will", "pretrained weight (the cross-attention will be intialized randomly if its", "each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also", "to initiate the encoder. Either: - a string with the", "function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of", "is possible to override this behavior and initialize, say, the", "configuration object should be provided as ``config`` argument. This loading", "model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs): super(Model2Model,", "from pre-trained model checkpoints. The model is set in evaluation", "decoder if \"decoder_config\" not in kwargs: raise ValueError( \"To load", "r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be", "to create a model from a pretrained configuration but load", "**kwargs): if ( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in", "A dictionary of proxy servers to use by protocol or", "one or two base classes of the library from pre-trained", "or - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is", "directly passed to the underlying model's ``__init__`` method (we assume", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "a state dictionary loaded from saved weights file. 
This option", "remaining kwargs will be passed to both encoders and decoders.", "ones in case of conflict. kwargs_common = { argument: value", "# by the value of the flag `is_decoder` that we", "to the underlying model's ``__init__`` method config: (`optional`) instance of", "None: # We will create a randomly initilized LSTM model", "in a PyTorch model using the provided conversion scripts and", "**kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and decoders'", "for the model to use instead of an automatically loaded", "kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\" # keyword", "use instead of an automatically loaded configuation. Configuration can be", "instead of a state dictionary loaded from saved weights file.", "import absolute_import, division, print_function, unicode_literals import logging import os import", "updates to the configuration have already been done) - If", "(prefixed by `decoder_`) and those # that apply to the", "If a configuration is provided with ``config``, ``**kwargs`` will be", "encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] =", "self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and", "= AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder", "import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder`", "will be used to initialize a\" \" torch.nn.LSTM model as", "= kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs):", "License. # You may obtain a copy of the License", "model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword", "in evaluation mode by default using `model.eval()` (Dropout modules are", "set it back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path:", "kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder)", "(Dropout modules are deactivated) To train the model, you need", "of the library as encoder and (optionally) another one as", "to load from cache or download, e.g.: ``bert-base-uncased``. - a", "in 3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed", "decoder; - During prediction, we perform one forward pass through", "argument[len(\"encoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"encoder_\")", "Instantiates an encoder and a decoder from one or two", "kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument, value in kwargs.items()", "apply to the model as a whole. # We let", "of an automatically loaded configuation. 
Configuration can be automatically loaded", "through both the encoder and decoder; - During prediction, we", "key of ``kwargs`` that corresponds to a configuration attribute will", "in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path", "Path to a directory in which a downloaded pre-trained model", "argument named `encoder_hidden_state` is passed to this function. Params: encoder_input_ids:", "of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model", "to initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword argument.", "**kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output the last", "or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`).", "\"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder", "need to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder", "roberta.embeddings.word_embeddings argument of the XEmbedding layer for each model, but", "hidden state else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] =", "the decoder randomly by creating it beforehand as follows config", "name` of a pre-trained model to load from cache or", "a configuration is not provided, ``kwargs`` will be first passed", "the encoder. Either: - a string with the `shortcut name`", "pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not in pretrained_model_name_or_path or", "the decoder. Either: - a string with the `shortcut name`", "specific kwargs override the common ones in case of conflict.", "configuration is provided with ``config``, ``**kwargs`` will be directly passed", "Each key of ``kwargs`` that corresponds to a configuration attribute", "self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model`", "pass on a seq2eq depends what we are performing: -", "path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`,", "from __future__ import absolute_import, division, print_function, unicode_literals import logging import", "url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In", "is reloaded by suppling the save directory. - the model", "To train the model, you need to first set it", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if decoder is", "self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings", "{ argument[len(\"encoder_\") :]: value for argument, value in kwargs.items() if", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "and decoder are of the same family. If the name", "prefixing the key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``).", "decoder input sequence tokens in the vocabulary. kwargs: (`optional`) Remaining", "by prefixing the key with `encoder_` and `decoder_` respectively. 
(e.g.", "please supply either: \" \" - a torch.nn.LSTM model as", "required by applicable law or agreed to in writing, software", "that path to a pretrained model is specified the encoder", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the decoder.", "provided conversion scripts and loading the PyTorch model afterwards. model_args:", "the underlying model's ``__init__`` method config: (`optional`) instance of a", "``**kwargs`` will be directly passed to the underlying model's ``__init__``", "decoder. Either: - a string with the `shortcut name` of", "# `encoder_`), decoder-specific (prefixed by `decoder_`) and those # that", "decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model", "from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an", "a path to a `directory` containing model weights saved using", "agreed to in writing, software # distributed under the License", "else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"]", "self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output the", "distributed under the License is distributed on an \"AS IS\"", "kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value in kwargs.items()", "apply to the model as whole. # We let the", "a `config` is provided or automatically loaded: - If a", "\"To load an LSTM in Encoder-Decoder model, please supply either:", "instantiates a Seq2Seq2 model where both of the encoder and", "from torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger", "2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs)", "We let the specific kwargs override the common ones in", "than converting the TensorFlow checkpoint in a PyTorch model using", "and is reloaded by suppling the save directory. 
- the", "You can specify kwargs sepcific for the encoder and decoder", "model from a pretrained configuration but load your own weights.", "- During prediction, we perform one forward pass through the", "flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`)", "with the encoder's hidden state through the decoder to decode", "- a dictionary of configuration parameters that will be used", "encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder )", "kwargs_decoder.pop(\"model\", None) if decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path,", "you need to first set it back in training mode", "of the flag `is_decoder` that we need to set correctly.", "kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs", "not provided, ``kwargs`` will be first passed to the configuration", "use instead of a state dictionary loaded from saved weights", "**kwargs ): r\"\"\" Instantiates an encoder and a decoder from", "encoder and decoder at the model level is made #", "those # that apply to the model as a whole.", "distinction between encoder and decoder at the model level is", "by suppling the save directory. - the model is loaded", "This loading path is slower than converting the TensorFlow checkpoint", "OR CONDITIONS OF ANY KIND, either express or implied. #", "the License is distributed on an \"AS IS\" BASIS, #", "the different model classes are inconsistent to that respect: -", "creating it beforehand as follows config = BertConfig.from_pretrained() decoder =", "any configuration attribute will be passed to the underlying model's", "Save a Seq2Seq model and its configuration file in a", "the underlying model's ``__init__`` function. You can specify kwargs sepcific", "respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to", "decoder # The distinction between encoder and decoder at the", "underlying model's ``__init__`` function. You can specify kwargs sepcific for", "PyTorch model using the provided conversion scripts and loading the", "that we need to set correctly. encoder = kwargs_encoder.pop(\"model\", None)", "RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM:", "law or agreed to in writing, software # distributed under", "None: A dictionary of proxy servers to use by protocol", "the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the", "parameter (`decoder_model=lstm_model`), or\" \" - a dictionary of configuration parameters", "come in 3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific", "configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that", "which a downloaded pre-trained model configuration should be cached if", "may obtain a copy of the License at # #", "However the different model classes are inconsistent to that respect:", "save directory. - the model is loaded by suppling a", "pass on the encoder if an argument named `encoder_hidden_state` is", "class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder", "decoder to decode a full sequence. 
Therefore, we skip the", ") # Load and initialize the encoder and decoder #", "may not use this file except in compliance with the", "proxies are used on each request. output_loading_info: (`optional`) boolean: Set", "whole. # We let the specific kwargs override the common", "in which a downloaded pre-trained model configuration should be cached", "decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\"", "this file except in compliance with the License. # You", "and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert", "decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder):", "decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights()", "arguments. \"\"\" # keyword arguments come in 3 flavors: encoder-specific", "that corresponds to a configuration attribute will be used to", "# # Licensed under the Apache License, Version 2.0 (the", "AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is", "``__init__`` function. You can specify kwargs sepcific for the encoder", "need for each to get down to the embedding weights.", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "decoder = kwargs_decoder.pop(\"model\", None) if decoder is None: decoder =", "tokens in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword", "argument, value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( {", "argument.startswith(\"decoder_\") } ) # Encode if needed (training, first prediction", "a\" \" torch.nn.LSTM model as `decoder_config` keyword argument. \" \"", "local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named", "team. # # Licensed under the Apache License, Version 2.0", "class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None)", "be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's", "depends what we are performing: - During training we perform", "`tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf``", "def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: #", "\"\"\" Save a Seq2Seq model and its configuration file in", "one forward pass through the encoder, and then perform several", "def tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings together.", "check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler", "also return a dictionnary containing missing keys, unexpected keys and", "in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]:", "argument of the XEmbedding layer for each model, but it", "be provided as ``config`` argument. This loading path is slower", "Therefore, we skip the forward pass on the encoder if", "or implied. 
# See the License for the specific language", "of positional arguments: All remaning positional arguments will be passed", "keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary", "with ``config``, ``**kwargs`` will be directly passed to the underlying", "to initiate the decoder. Either: - a string with the", "\"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def", "model classes of the library as encoder and (optionally) another", "the model. (e.g. ``output_attention=True``). Behave differently depending on whether a", "``./my_model_directory/decoder``. - a path or url to a `tensorflow index", "made # by the value of the flag `is_decoder` that", "encoder = kwargs_encoder.pop(\"model\", None) if encoder is None: encoder =", "self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if (", "simpler option. cache_dir: (`optional`) string: Path to a directory in", "(training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states", "option. cache_dir: (`optional`) string: Path to a directory in which", "torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a", "``__init__`` method (we assume all relevant updates to the configuration", "- the model is a model provided by the library", "supply either: \" \" - a torch.nn.LSTM model as `decoder_model`", "sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size,", "inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings", "(`decoder_model=lstm_model`), or\" \" - a dictionary of configuration parameters that", "``__init__`` method config: (`optional`) instance of a class derived from", "of decoder input sequence tokens in the vocabulary. kwargs: (`optional`)", "transformer architecture with one of the base model classes of", "None) if decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder", "be cached if the standard cache should not be used.", ") return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs):", ":func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to a", "passed to both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased',", "None) if encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args,", "(re-)download the model weights and configuration files and override the", "\"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is", ") encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if decoder", "Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder", "an encoder and a decoder from one or two base", "model is specified the encoder and the decoder will be", "division, print_function, unicode_literals import logging import os import torch from", "@classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None:", "it back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information", "the vocabulary. 
kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\"", "GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of", "proxy servers to use by protocol or endpoint, e.g.: {'http':", "object should be provided as ``config`` argument. This loading path", "method config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`:", "{ argument[len(\"decoder_\") :]: value for argument, value in kwargs.items() if", "of a pretrained model), or - the model was saved", "loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters", "`directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. -", "update the configuration object (after it being loaded) and initiate", "to ``True`` to also return a dictionnary containing missing keys,", "file in a format such that it can be loaded", "LSTM in Encoder-Decoder model, please supply either: \" \" -", "directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json`", ":func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`) string: Path", "used to override said attribute with the supplied ``kwargs`` value.", "(loaded with the ``shortcut-name`` string of a pretrained model), or", "import logging import os import torch from torch import nn", "this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and", "with one of the base model classes of the library", "**kwargs): if kwargs.get(\"decoder_model\", None) is None: # We will create", "skip the forward pass on the encoder if an argument", "it beforehand as follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config)", "\"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args,", "to (re-)download the model weights and configuration files and override", "is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder =", "provided conversion scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path:", "encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either: - a", "If the name of or that path to a pretrained", "(`optional`) boolean, default False: Force to (re-)download the model weights", "decoder at the model level is made # by the", "that apply to the model as a whole. # We", "of the library from pre-trained model checkpoints. The model is", "but load your own weights. In this case though, you", "respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings", "and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`) string:", "model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a dictionary", "initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether", "decoder are of the same family. If the name of", "(:func:`~transformers.PretrainedConfig.from_pretrained`). 
Each key of ``kwargs`` that corresponds to a configuration", "a pre-trained model to load from cache or download, e.g.:", "encoder self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None,", "saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save", "XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM:", "found in the directory. state_dict: (`optional`) dict: an optional state", "in writing, software # distributed under the License is distributed", "= cls(encoder, decoder) return model def save_pretrained(self, save_directory): \"\"\" Save", "base model classes of the library as encoder and (optionally)", "e.g.: ``./my_model_directory/encoder``. - a path or url to a `tensorflow", "to first set it back in training mode with `model.train()`", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use", "License, Version 2.0 (the \"License\"); # you may not use", "class that will be instantiated as a transformer architecture with", "and a configuration object should be provided as ``config`` argument.", "(`optional`) dict, default None: A dictionary of proxy servers to", "specified the encoder and the decoder will be initialized with", "LSTM model as decoder if \"decoder_config\" not in kwargs: raise", "a configuration attribute will be used to override said attribute", "the configuration object (after it being loaded) and initiate the", "saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url", "your own weights. In this case though, you should check", "initialize Bert2Bert \"\"\" # keyword arguments come in 3 flavors:", "scripts and loading the PyTorch model afterwards. model_args: (`optional`) Sequence", "keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword", "the License for the specific language governing permissions and #", "to update the configuration object (after it being loaded) and", "# We let the specific kwargs override the common ones", ":class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both of the encoder", "__future__ import absolute_import, division, print_function, unicode_literals import logging import os", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder)", "loading the PyTorch model afterwards. model_args: (`optional`) Sequence of positional", ") kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value in", "library from pre-trained model checkpoints. The model is set in", "decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of decoder input", "to override this behavior and initialize, say, the decoder randomly", "`model.eval()` (Dropout modules are deactivated) To train the model, you", "is a generic model class that will be instantiated as", "it is \"blocked\" by a model-specific keyword (bert, )... 
\"\"\"", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "None) is None: # We will create a randomly initilized", "`decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a dictionary of configuration", "passed to the underlying model's ``__init__`` method (we assume all", "and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to", "in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is currently", "first passed to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each", "it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave", "decoder from one or two base classes of the library", "keyword arguments. Can be used to update the configuration object", "logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class", "the model level is made # by the value of", "forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on a", "directory in which a downloaded pre-trained model configuration should be", "we need to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if", "it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder'", "# distributed under the License is distributed on an \"AS", "- a path or url to a `tensorflow index checkpoint", "Bert2Bert \"\"\" # keyword arguments come in 3 flavors: encoder-specific", "# self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs):", "# Unless required by applicable law or agreed to in", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "through the decoder to decode a full sequence. Therefore, we", "if needed (training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None)", "``config``, ``**kwargs`` will be directly passed to the underlying model's", "During prediction, we perform one forward pass through the encoder,", "keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def", "the Apache License, Version 2.0 (the \"License\"); # you may", "encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\",", "the embedding weights. However the different model classes are inconsistent", "'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model =", "deactivated) To train the model, you need to first set", "model to use instead of a state dictionary loaded from", "Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is", "the cached versions if they exists. proxies: (`optional`) dict, default", "model as `decoder_config` keyword argument. \" \" E.g. 
`decoder_config={'input_size': 768,", "The distinction between encoder and decoder at the model level", "initialize the encoder and decoder # The distinction between encoder", "to support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division,", "encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids,", "the directory. state_dict: (`optional`) dict: an optional state dictionnary for", "(optionally) another one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)`", "\"\"\" The forward pass on a seq2eq depends what we", "one forward pass through both the encoder and decoder; -", "\" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"]", "parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\"))", "back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary", "to the model as whole. # We let the specific", "model class that will be instantiated as a transformer architecture", "Tying the encoder and decoders' embeddings together. We need for", "the encoder' and decoder's parameters in two separate directories. \"\"\"", "architecture with one of the base model classes of the", "PyTorch model afterwards. model_args: (`optional`) Sequence of positional arguments: All", "on a seq2eq depends what we are performing: - During", "of a pre-trained model to load from cache or download,", "to a directory in which a downloaded pre-trained model configuration", "loaded by suppling a local directory as ``pretrained_model_name_or_path`` and a", "logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic", "def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self):", "of proxy servers to use by protocol or endpoint, e.g.:", "model is set in evaluation mode by default using `model.eval()`", "encoder. Either: - a string with the `shortcut name` of", "from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not in pretrained_model_name_or_path", "protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies", "*args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls,", "(`optional`) string: Path to a directory in which a downloaded", "mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the", "to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this", "\" - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\"", "loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate", "encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where", "= BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\"", "under the License is distributed on an \"AS IS\" BASIS,", "value. 
Remaining keys that do not correspond to any configuration", "encoder and decoder are of the same family. If the", "behavior and initialize, say, the decoder randomly by creating it", "kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument, value in", "**kwargs): \"\"\" The forward pass on a seq2eq depends what", "model, you need to first set it back in training", "method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder =", "or two base classes of the library from pre-trained model", "in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def", "the encoder and decoders' embeddings together. We need for each", "``bert-base-uncased``. - a path to a `directory` containing model weights", "argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder =", "False decoder = kwargs_decoder.pop(\"model\", None) if decoder is None: decoder", "pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert", "= super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model", "model), or - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and", "attribute will be used to override said attribute with the", "string of a pretrained model), or - the model was", "a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``.", "a configuration object should be provided as ``config`` argument. This", "argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( {", "decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder", "class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds", "not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update(", "force_download: (`optional`) boolean, default False: Force to (re-)download the model", "by # `encoder_`), decoder-specific (prefixed by `decoder_`) and those #", "ANY KIND, either express or implied. # See the License", "value in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") }", "the License. # You may obtain a copy of the", "the encoder's hidden state through the decoder to decode a", "will be passed to both encoders and decoders. Examples:: model", "'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each request.", "(`optional`) Remaining dictionary of keyword arguments. Can be used to", "and initialize the encoder and decoder # The distinction between", "self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward", "\"\"\" Tying the encoder and decoders' embeddings together. 
We need", "# See the License for the specific language governing permissions", "ValueError(\"Only the Bert model is currently supported.\") model = super(Model2Model,", "encoder and (optionally) another one as decoder when created with", "kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value", "of the XEmbedding layer for each model, but it is", "the decoder to decode a full sequence. Therefore, we skip", "the model is a model provided by the library (loaded", "currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs", "``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in the", "the TensorFlow checkpoint in a PyTorch model using the provided", "be directly passed to the underlying model's ``__init__`` method (we", "argument, value in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\")", "'foo.bar:4012'}. The proxies are used on each request. output_loading_info: (`optional`)", "we perform one forward pass through the encoder, and then", "can be automatically loaded when: - the model is a", "option can be used if you want to create a", "from one or two base classes of the library from", "is \"blocked\" by a model-specific keyword (bert, )... \"\"\" #", "in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices", "used on each request. output_loading_info: (`optional`) boolean: Set to ``True``", "if encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder", "with the supplied ``kwargs`` value. Remaining keys that do not", "( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or", "passed to the underlying model's ``__init__`` function. You can specify", "and a configuration JSON file named `config.json` is found in", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to", "the standard cache should not be used. force_download: (`optional`) boolean,", "argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode", "one of the base model classes of the library as", "self.encoder = encoder self.decoder = decoder @classmethod def from_pretrained( cls,", "Encode if needed (training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\",", "writing, software # distributed under the License is distributed on", "download, e.g.: ``bert-base-uncased``. - a path to a `directory` containing", "keyword argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers':", "training we perform one forward pass through both the encoder", "you want to create a model from a pretrained configuration", "`decoder_config` keyword argument. \" \" E.g. 
`decoder_config={'input_size': 768, 'hidden_size': 768,", "return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if", "can be used if you want to create a model", "pass through both the encoder and decoder; - During prediction,", "each model, but it is \"blocked\" by a model-specific keyword", "shape ``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in", "necessary to initiate the encoder. Either: - a string with", "Force to (re-)download the model weights and configuration files and", "Remaining dictionary of keyword arguments. \"\"\" # keyword arguments come", "e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on", "prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None:", "beforehand as follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model", "*args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying", "with the `shortcut name` of a pre-trained model to load", "they exists. proxies: (`optional`) dict, default None: A dictionary of", "not in kwargs: raise ValueError( \"To load an LSTM in", "where both of the encoder and decoder are of the", "to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``", "super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class", "saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or url", "the model to use instead of an automatically loaded configuation.", "model afterwards. model_args: (`optional`) Sequence of positional arguments: All remaning", "embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings", "key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining", "dictionary of proxy servers to use by protocol or endpoint,", "that will be instantiated as a transformer architecture with one", "weights. However the different model classes are inconsistent to that", "this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices", "as whole. # We let the specific kwargs override the", "default None: A dictionary of proxy servers to use by", "such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save", "from saved weights file. This option can be used if", "Can be used to update the configuration object (after it", "a seq2eq depends what we are performing: - During training", "passed to the underlying model's ``__init__`` method config: (`optional`) instance", "= decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs", "cached if the standard cache should not be used. force_download:", "language governing permissions and # limitations under the License. \"\"\"", "pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is currently supported.\")", "model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config`", "are used on each request. 
output_loading_info: (`optional`) boolean: Set to", "Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either: -", "exists. proxies: (`optional`) dict, default None: A dictionary of proxy", "is not a simpler option. cache_dir: (`optional`) string: Path to", "= Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args,", "not correspond to any configuration attribute will be passed to", "provided by the library (loaded with the ``shortcut-name`` string of", "= True model = cls(encoder, decoder) return model def save_pretrained(self,", "super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder @classmethod def", "initialized with the pretrained weight (the cross-attention will be intialized", "and configuration files and override the cached versions if they", "argument.startswith(\"decoder_\") } ) # Load and initialize the encoder and", "and override the cached versions if they exists. proxies: (`optional`)", "as encoder and (optionally) another one as decoder when created", "state dictionary loaded from saved weights file. This option can", "3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by", "reloaded by suppling the save directory. - the model is", "decoder-specific (prefixed by `decoder_`) and those # that apply to", "to the model as a whole. # We let the", "self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\"", "model is loaded by suppling a local directory as ``pretrained_model_name_or_path``", "coding=utf-8 # Copyright 2018 The HuggingFace Inc. team. # #", "decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2", "**kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder, decoder) return", "both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') #", "arguments come in 3 flavors: encoder-specific (prefixed by # `encoder_`),", "The proxies are used on each request. 
output_loading_info: (`optional`) boolean:", "AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a", "is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "kwargs_encoder.pop(\"model\", None) if encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path,", "We save the encoder' and decoder's parameters in two separate", "derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument,", "to use instead of a state dictionary loaded from saved", "generic model class that will be instantiated as a transformer", "``shortcut-name`` string of a pretrained model), or - the model", "None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True", "permissions and # limitations under the License. \"\"\" Classes to", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of decoder input sequence", "request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return", ":class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an", "def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not in", "to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname':", "is provided with ``config``, ``**kwargs`` will be directly passed to", "encoder and decoder; - During prediction, we perform one forward", "the key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The", "encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an encoder and", "case of conflict. kwargs_common = { argument: value for argument,", "common ones in case of conflict. kwargs_common = { argument:", "and (optionally) another one as decoder when created with the", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "are performing: - During training we perform one forward pass", "training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate", "this case, ``from_tf`` should be set to True and a", "library (loaded with the ``shortcut-name`` string of a pretrained model),", "specific language governing permissions and # limitations under the License.", "provided as ``config`` argument. This loading path is slower than", "decoder.config.is_decoder = True model = cls(encoder, decoder) return model def", "class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both", "what we are performing: - During training we perform one", ")... 
\"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path,", "for each model, but it is \"blocked\" by a model-specific", "mode by default using `model.eval()` (Dropout modules are deactivated) To", "if they exists. proxies: (`optional`) dict, default None: A dictionary", "be intialized randomly if its weights are not present). It", "the encoder and the decoder will be initialized with the", "# you may not use this file except in compliance", "under the License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\"", "checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be", "model_args: (`optional`) Sequence of positional arguments: All remaning positional arguments", "encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[", "and # limitations under the License. \"\"\" Classes to support", "from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: # We", "\"bert\" not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\"", "one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method.", "using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or url to", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "encoder and the decoder will be initialized with the pretrained", "cross-attention will be intialized randomly if its weights are not", "encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] #", "a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file", "argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument,", "# that apply to the model as a whole. #", "the ``shortcut-name`` string of a pretrained model), or - the", "under the Apache License, Version 2.0 (the \"License\"); # you", "provided or automatically loaded: - If a configuration is provided", "the flag `is_decoder` that we need to set correctly. encoder", "the library (loaded with the ``shortcut-name`` string of a pretrained", "# output the last layer hidden state else: encoder_outputs =", "in Encoder-Decoder model, please supply either: \" \" - a", "file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set", "be initialized with the pretrained weight (the cross-attention will be", "\"\"\" from __future__ import absolute_import, division, print_function, unicode_literals import logging", "in a format such that it can be loaded using", "AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder =", "\"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only", "if kwargs.get(\"decoder_model\", None) is None: # We will create a", "servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128',", "directories. 
\"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids,", "config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration", "sequence_length)`` Indices of decoder input sequence tokens in the vocabulary.", "(`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for", "= AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model =", "kwargs override the common ones in case of conflict. kwargs_common", "Either: - a string with the `shortcut name` of a", "return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a", "`decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\"))", "torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger =", "file named `config.json` is found in the directory. state_dict: (`optional`)", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "sequence_length)`` Indices of encoder input sequence tokens in the vocabulary.", "a whole. # We let the specific kwargs override the", "os import torch from torch import nn from .modeling_auto import", "def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates", "information necessary to initiate the encoder. Either: - a string", "decoder) return model def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq", "forward pass through both the encoder and decoder; - During", "cache should not be used. force_download: (`optional`) boolean, default False:", "and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be", "sequence. Therefore, we skip the forward pass on the encoder", "Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both of", "= encoder_outputs[ 0 ] # output the last layer hidden", "checkpoints. The model is set in evaluation mode by default", "and those # that apply to the model as a", "separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids,", "for the model to use instead of a state dictionary", "if \"decoder_config\" not in kwargs: raise ValueError( \"To load an", "with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder.", "missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining", "scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary", "model configuration should be cached if the standard cache should", "need to first set it back in training mode with", "and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on", "instantiated as a transformer architecture with one of the base", "conflict. kwargs_common = { argument: value for argument, value in", "passed to this function. 
Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size,", "**kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the", "# The distinction between encoder and decoder at the model", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "last layer hidden state else: encoder_outputs = () # Decode", "a directory in which a downloaded pre-trained model configuration should", "if its weights are not present). It is possible to", "value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\")", "Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs)", "underlying model's ``__init__`` method (we assume all relevant updates to", "model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path", "import os import torch from torch import nn from .modeling_auto", "model to use instead of an automatically loaded configuation. Configuration", "you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not", "kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if needed (training,", "instead of an automatically loaded configuation. Configuration can be automatically", "Apache License, Version 2.0 (the \"License\"); # you may not", "should be provided as ``config`` argument. This loading path is", "either express or implied. # See the License for the", "the encoder if an argument named `encoder_hidden_state` is passed to", "decode a full sequence. Therefore, we skip the forward pass", "string: Path to a directory in which a downloaded pre-trained", "set to True and a configuration object should be provided", "'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM, cls).from_pretrained(*args,", "is made # by the value of the flag `is_decoder`", "set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is None:", "dict, default None: A dictionary of proxy servers to use", "classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings -", "in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if needed", "and then perform several forward passes with the encoder's hidden", "The HuggingFace Inc. team. # # Licensed under the Apache", "want to create a model from a pretrained configuration but", "kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs) return model", "- the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded", "): raise ValueError(\"Only the Bert model is currently supported.\") model", "through the encoder, and then perform several forward passes with", "with the ``shortcut-name`` string of a pretrained model), or -", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "dictionary of keyword arguments. 
\"\"\" # keyword arguments come in", "follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased',", "Seq2Seq2 model where both of the encoder and decoder are", "# Encode if needed (training, first prediction pass) encoder_hidden_states =", "configuration attribute will be used to override said attribute with", "passed to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key", "- GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument", "argument: value for argument, value in kwargs.items() if not argument.startswith(\"encoder_\")", "`encoder_`), decoder-specific (prefixed by `decoder_`) and those # that apply", "prediction, we perform one forward pass through the encoder, and", "differently depending on whether a `config` is provided or automatically", "present). It is possible to override this behavior and initialize,", "by default using `model.eval()` (Dropout modules are deactivated) To train", "from cache or download, e.g.: ``bert-base-uncased``. - a path to", "initiate the decoder. Either: - a string with the `shortcut", "and decoder's parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\"))", "use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}.", "to the underlying model's ``__init__`` function. You can specify kwargs", "of the same family. If the name of or that", "super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder", "# Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None", "value for argument, value in kwargs.items() if not argument.startswith(\"encoder_\") and", "all relevant updates to the configuration have already been done)", "dictionnary containing missing keys, unexpected keys and error messages. kwargs:", "model as whole. # We let the specific kwargs override", "weights are not present). It is possible to override this", "state_dict: (`optional`) dict: an optional state dictionnary for the model", "are deactivated) To train the model, you need to first", "Behave differently depending on whether a `config` is provided or", "and decoder # The distinction between encoder and decoder at", "dictionary loaded from saved weights file. 
This option can be", "whether a `config` is provided or automatically loaded: - If", "``kwargs`` that corresponds to a configuration attribute will be used", "AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder,", "save the encoder' and decoder's parameters in two separate directories.", "`decoder_`) and those # that apply to the model as", "needed (training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if", "kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and initialize the", "seq2eq depends what we are performing: - During training we", "not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in", "will be directly passed to the underlying model's ``__init__`` method", "PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments come", "a full sequence. Therefore, we skip the forward pass on", "`model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either:", "``config`` argument. This loading path is slower than converting the", "tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``", "will be passed to the underlying model's ``__init__`` function. You", "or that path to a pretrained model is specified the", "use this file except in compliance with the License. #", "and decoder by prefixing the key with `encoder_` and `decoder_`", "2018 The HuggingFace Inc. team. # # Licensed under the", "} ) # Encode if needed (training, first prediction pass)", "or automatically loaded: - If a configuration is provided with", "model, please supply either: \" \" - a torch.nn.LSTM model", "its weights are not present). It is possible to override", ".modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\"", "pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\"", "the specific kwargs override the common ones in case of", "model classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings", "to both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased')", "and decoder at the model level is made # by", "encoder' and decoder's parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory,", "an LSTM in Encoder-Decoder model, please supply either: \" \"", "and the decoder will be initialized with the pretrained weight", "unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of", "from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of", "load from cache or download, e.g.: ``bert-base-uncased``. - a path", "if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy()", "relevant updates to the configuration have already been done) -", "kwargs_common = { argument: value for argument, value in kwargs.items()", "in compliance with the License. 
# You may obtain a", "will be intialized randomly if its weights are not present).", "loaded when: - the model is a model provided by", "software # distributed under the License is distributed on an", "state through the decoder to decode a full sequence. Therefore,", "of encoder input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor``", "governing permissions and # limitations under the License. \"\"\" Classes", "the encoder, and then perform several forward passes with the", "and decoder; - During prediction, we perform one forward pass", "correspond to any configuration attribute will be passed to the", "argument. This loading path is slower than converting the TensorFlow", "return a dictionnary containing missing keys, unexpected keys and error", "to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.:", "and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy()", "\"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs +", "Seq2Seq model and its configuration file in a format such", "a path or url to a `tensorflow index checkpoint file`", "method (we assume all relevant updates to the configuration have", "to a configuration attribute will be used to override said", "cls(encoder, decoder) return model def save_pretrained(self, save_directory): \"\"\" Save a", "model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod", ":]: value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") }", "kwargs.get(\"decoder_model\", None) is None: # We will create a randomly", "default using `model.eval()` (Dropout modules are deactivated) To train the", "BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs):", "are of the same family. If the name of or", "= self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output", "using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to", "(the cross-attention will be intialized randomly if its weights are", "or download, e.g.: ``bert-base-uncased``. - a path to a `directory`", "\" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" )", "model's ``__init__`` method config: (`optional`) instance of a class derived", "self).__init__() self.encoder = encoder self.decoder = decoder @classmethod def from_pretrained(", "model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\",", "`AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__()", "to decode a full sequence. Therefore, we skip the forward", "`directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. -", "together. We need for each to get down to the", "using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. 
cache_dir:", "- BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding", "a dictionary of configuration parameters that will be used to", "for the encoder and decoder by prefixing the key with", "using the provided conversion scripts and loading the PyTorch model", "*args, **kwargs): if ( \"bert\" not in pretrained_model_name_or_path or \"roberta\"", ":class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be instantiated", "nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class", "override this behavior and initialize, say, the decoder randomly by", "initilized LSTM model as decoder if \"decoder_config\" not in kwargs:", "being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently", "to override said attribute with the supplied ``kwargs`` value. Remaining", "we perform one forward pass through both the encoder and", "load your own weights. In this case though, you should", "cached versions if they exists. proxies: (`optional`) dict, default None:", "optional state dictionnary for the model to use instead of", "None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ]", "limitations under the License. \"\"\" Classes to support Encoder-Decoder architectures", "pass through the encoder, and then perform several forward passes", "and decoders' embeddings together. We need for each to get", "with the License. # You may obtain a copy of", "decoders' embeddings together. We need for each to get down", "if argument.startswith(\"decoder_\") } ) # Encode if needed (training, first", "= PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments", "model as decoder if \"decoder_config\" not in kwargs: raise ValueError(", "# initialize Bert2Bert \"\"\" # keyword arguments come in 3", "in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the", "ValueError( \"To load an LSTM in Encoder-Decoder model, please supply", "\"\"\" # keyword arguments come in 3 flavors: encoder-specific (prefixed", "encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`) and", "model as a whole. # We let the specific kwargs", "are inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa:", "express or implied. # See the License for the specific", "except in compliance with the License. # You may obtain", "the save directory. - the model is loaded by suppling", "down to the embedding weights. However the different model classes", "E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] =", "model = cls(encoder, decoder) return model def save_pretrained(self, save_directory): \"\"\"", "perform one forward pass through the encoder, and then perform", "the model as a whole. 
# We let the specific", "- During training we perform one forward pass through both", "slower than converting the TensorFlow checkpoint in a PyTorch model", "encoder's hidden state through the decoder to decode a full", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "to True and a configuration object should be provided as", "if argument.startswith(\"decoder_\") } ) # Load and initialize the encoder", "= () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get(", "not present). It is possible to override this behavior and", "first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is", "CONDITIONS OF ANY KIND, either express or implied. # See", "the supplied ``kwargs`` value. Remaining keys that do not correspond", "will be used to override said attribute with the supplied", "encoder input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of", "] # output the last layer hidden state else: encoder_outputs", "or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are", "the pretrained weight (the cross-attention will be intialized randomly if", "of or that path to a pretrained model is specified", "model is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path,", "said attribute with the supplied ``kwargs`` value. Remaining keys that", "TensorFlow checkpoint in a PyTorch model using the provided conversion", "'http://hostname': 'foo.bar:4012'}. The proxies are used on each request. output_loading_info:", "the same family. If the name of or that path", "get down to the embedding weights. However the different model", "argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load", "for each to get down to the embedding weights. However", "(`optional`) Sequence of positional arguments: All remaning positional arguments will", "configuration is not provided, ``kwargs`` will be first passed to", "in case of conflict. kwargs_common = { argument: value for", "we skip the forward pass on the encoder if an", "directory. - the model is loaded by suppling a local", "then perform several forward passes with the encoder's hidden state", "full sequence. Therefore, we skip the forward pass on the", "save_directory): \"\"\" Save a Seq2Seq model and its configuration file", "value for argument, value in kwargs.items() if argument.startswith(\"encoder_\") } )", "a model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass", "will be first passed to the configuration class initialization function", "pretrained configuration but load your own weights. In this case", "automatically loaded configuation. Configuration can be automatically loaded when: -", "= { argument: value for argument, value in kwargs.items() if", "and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments.", "dictionnary for the model to use instead of a state", "boolean, default False: Force to (re-)download the model weights and", "error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments. Can", "the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). 
Each key of ``kwargs``", "pre-trained model checkpoints. The model is set in evaluation mode", "named `encoder_hidden_state` is passed to this function. Params: encoder_input_ids: ``torch.LongTensor``", "if an argument named `encoder_hidden_state` is passed to this function.", "= self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\"", "} kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\")", "(`optional`) Remaining dictionary of keyword arguments. \"\"\" # keyword arguments", "perform several forward passes with the encoder's hidden state through", "argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\"", "default False: Force to (re-)download the model weights and configuration", "kwargs: raise ValueError( \"To load an LSTM in Encoder-Decoder model,", "arguments will be passed to the underlying model's ``__init__`` method", "specify kwargs sepcific for the encoder and decoder by prefixing", "- a path to a `directory` containing model weights saved", "as ``config`` argument. This loading path is slower than converting", "encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize", "randomly by creating it beforehand as follows config = BertConfig.from_pretrained()", "`:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters in two", "encoder and decoders' embeddings together. We need for each to", "cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an encoder", "each to get down to the embedding weights. However the", "containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a", "# that apply to the model as whole. # We", "two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self,", "both the encoder and decoder; - During prediction, we perform", "the XEmbedding layer for each model, but it is \"blocked\"", "The forward pass on a seq2eq depends what we are", "cache_dir: (`optional`) string: Path to a directory in which a", "with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs", "vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of decoder", "from a pretrained configuration but load your own weights. In", "downloaded pre-trained model configuration should be cached if the standard", "decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder @classmethod", "weights file. This option can be used if you want", "kwargs: (`optional`) Remaining dictionary of keyword arguments. Can be used", "torch from torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead", "kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder)", "initiate the encoder. Either: - a string with the `shortcut", "will create a randomly initilized LSTM model as decoder if", "the common ones in case of conflict. 
kwargs_common = {", "BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def", "encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on a seq2eq", "directory. state_dict: (`optional`) dict: an optional state dictionnary for the", "RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer for each model,", "During training we perform one forward pass through both the", "used to initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword", "encoder and a decoder from one or two base classes", "configuration file in a format such that it can be", ":func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory. -", "= kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids,", "the model is loaded by suppling a local directory as", "768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM,", "was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the", "{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each", "a model provided by the library (loaded with the ``shortcut-name``", "evaluation mode by default using `model.eval()` (Dropout modules are deactivated)", "those # that apply to the model as whole. #", "\"decoder_config\" not in kwargs: raise ValueError( \"To load an LSTM", "are not present). It is possible to override this behavior", "if you want to create a model from a pretrained", "override said attribute with the supplied ``kwargs`` value. Remaining keys", "of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens", "is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0", "the License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\" from", "if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states =", "is specified the encoder and the decoder will be initialized", "as follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model =", "create a model from a pretrained configuration but load your", "file. This option can be used if you want to", "Remaining keys that do not correspond to any configuration attribute", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "Bert model is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path,", "arguments: All remaning positional arguments will be passed to the", "a configuration JSON file named `config.json` is found in the", "though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is", "load an LSTM in Encoder-Decoder model, please supply either: \"", "output_loading_info: (`optional`) boolean: Set to ``True`` to also return a", "pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``.", "its configuration file in a format such that it can", "corresponds to a configuration attribute will be used to override", "the PyTorch model afterwards. 
model_args: (`optional`) Sequence of positional arguments:", "containing missing keys, unexpected keys and error messages. kwargs: (`optional`)", "the Bert model is currently supported.\") model = super(Model2Model, cls).from_pretrained(", "function. You can specify kwargs sepcific for the encoder and", "perform one forward pass through both the encoder and decoder;", "r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both of the", "kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]:", "if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for", "an automatically loaded configuation. Configuration can be automatically loaded when:", "afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either: -", "say, the decoder randomly by creating it beforehand as follows", "decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def", "loading path is slower than converting the TensorFlow checkpoint in", "(e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to", "input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape", "state else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states", "Version 2.0 (the \"License\"); # you may not use this", "input sequence tokens in the vocabulary. kwargs: (`optional`) Remaining dictionary", "create a randomly initilized LSTM model as decoder if \"decoder_config\"", "case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained`", "decoder will be initialized with the pretrained weight (the cross-attention", "model and its configuration file in a format such that", "# We will create a randomly initilized LSTM model as", "configuration attribute will be passed to the underlying model's ``__init__``", "= logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model", "be set to True and a configuration object should be", "the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder,", "The remaining kwargs will be passed to both encoders and", "but it is \"blocked\" by a model-specific keyword (bert, )...", "# limitations under the License. \"\"\" Classes to support Encoder-Decoder", "encoder_hidden_states = encoder_outputs[ 0 ] # output the last layer", "with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder):", "= False decoder = kwargs_decoder.pop(\"model\", None) if decoder is None:", "``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence", "by applicable law or agreed to in writing, software #", "be used to override said attribute with the supplied ``kwargs``", "(bert, )... 
\"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls,", "classes of the library as encoder and (optionally) another one", "() # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\",", "from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module):", "embedding weights. However the different model classes are inconsistent to", "to the embedding weights. However the different model classes are", "PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will", "- XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings -", ":func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or url to a", "def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model and its", "is provided or automatically loaded: - If a configuration is", "configuration have already been done) - If a configuration is", "forward pass on the encoder if an argument named `encoder_hidden_state`", "torch.nn.LSTM model as `decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size':", "performing: - During training we perform one forward pass through", "the model weights and configuration files and override the cached", "raise ValueError( \"To load an LSTM in Encoder-Decoder model, please", "``./my_model_directory/encoder``. - a path or url to a `tensorflow index", "dictionary of keyword arguments. Can be used to update the", "In this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained`", "used to update the configuration object (after it being loaded)", "a PyTorch model using the provided conversion scripts and loading", "set in evaluation mode by default using `model.eval()` (Dropout modules", "None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs", "e.g.: ``bert-base-uncased``. - a path to a `directory` containing model", "applicable law or agreed to in writing, software # distributed", "None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states", "an argument named `encoder_hidden_state` is passed to this function. Params:", "the library as encoder and (optionally) another one as decoder", "this behavior and initialize, say, the decoder randomly by creating", "configuation. Configuration can be automatically loaded when: - the model", "the model as whole. # We let the specific kwargs", "initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to", "decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ):", "the model, you need to first set it back in", "provided, ``kwargs`` will be first passed to the configuration class", "if the standard cache should not be used. force_download: (`optional`)", "`shortcut name` of a pre-trained model to load from cache", "and its configuration file in a format such that it", "# You may obtain a copy of the License at", "in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments.", "randomly if its weights are not present). 
It is possible", "afterwards. model_args: (`optional`) Sequence of positional arguments: All remaning positional", "be used to update the configuration object (after it being", "**kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates", "to get down to the embedding weights. However the different", "(e.g. ``output_attention=True``). Behave differently depending on whether a `config` is", "None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder =", "**kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if", "between encoder and decoder at the model level is made", "for argument, value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update(", "saved weights file. This option can be used if you", "configuration parameters that will be used to initialize a\" \"", "768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model", "used. force_download: (`optional`) boolean, default False: Force to (re-)download the", "to the underlying model's ``__init__`` method (we assume all relevant", "that do not correspond to any configuration attribute will be", "suppling a local directory as ``pretrained_model_name_or_path`` and a configuration JSON", "= kwargs_encoder.pop(\"model\", None) if encoder is None: encoder = AutoModel.from_pretrained(", "*model_args, **kwargs ): r\"\"\" Instantiates an encoder and a decoder", "a configuration is provided with ``config``, ``**kwargs`` will be directly", "kwargs will be passed to both encoders and decoders. Examples::", "of shape ``(batch_size, sequence_length)`` Indices of decoder input sequence tokens", "*model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None)", "cache or download, e.g.: ``bert-base-uncased``. - a path to a", "is set in evaluation mode by default using `model.eval()` (Dropout", "is slower than converting the TensorFlow checkpoint in a PyTorch", "a generic model class that will be instantiated as a", "0 ] # output the last layer hidden state else:", "a randomly initilized LSTM model as decoder if \"decoder_config\" not", "the encoder and decoder by prefixing the key with `encoder_`", "forward passes with the encoder's hidden state through the decoder", "= encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs =", "weights and configuration files and override the cached versions if", "\"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass", "not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder", "- BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings -", "layer for each model, but it is \"blocked\" by a", "decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either: - a", "named `config.json` is found in the directory. state_dict: (`optional`) dict:", "train the model, you need to first set it back", "\"License\"); # you may not use this file except in", "pre-trained model configuration should be cached if the standard cache", "as a whole. 
# We let the specific kwargs override", "__init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\"", "dict: an optional state dictionnary for the model to use", "weight (the cross-attention will be intialized randomly if its weights", "= encoder self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None,", "to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is", "by creating it beforehand as follows config = BertConfig.from_pretrained() decoder", "We will create a randomly initilized LSTM model as decoder", "on the encoder if an argument named `encoder_hidden_state` is passed", "- a string with the `shortcut name` of a pre-trained", "that apply to the model as whole. # We let", "already been done) - If a configuration is not provided,", "wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the", "of the base model classes of the library as encoder", "- If a configuration is not provided, ``kwargs`` will be", "is found in the directory. state_dict: (`optional`) dict: an optional", "path or url to a `tensorflow index checkpoint file` (e.g.", "object (after it being loaded) and initiate the model. (e.g.", "self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if", "as decoder if \"decoder_config\" not in kwargs: raise ValueError( \"To", "another one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class", "two base classes of the library from pre-trained model checkpoints.", "It is possible to override this behavior and initialize, say,", "`encoder_hidden_state` is passed to this function. Params: encoder_input_ids: ``torch.LongTensor`` of" ]
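As a complement to the listing above, here is a minimal usage sketch of the Model2Model class, assuming the legacy transformers 2.x API described in the docstrings; the checkpoint name, tokenizer and dummy sentences are illustrative assumptions, not part of the original listing.

# Minimal sketch (assumes transformers 2.x, where Model2Model is exported at the top level).
import torch
from transformers import BertTokenizer, Model2Model

tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
# Encoder and decoder are both initialized from the same BERT checkpoint (Bert2Bert).
model = Model2Model.from_pretrained("bert-base-uncased")
model.eval()  # already the default per the docstring above; made explicit here

source = "Who was Jim Henson?"
target = "Jim Henson was a puppeteer"
encoder_input_ids = torch.tensor([tokenizer.encode(source)])
decoder_input_ids = torch.tensor([tokenizer.encode(target)])

with torch.no_grad():
    # One pass through encoder and decoder; decoder-specific kwargs would be
    # prefixed with `decoder_` and routed as described in forward() above.
    outputs = model(encoder_input_ids, decoder_input_ids)
prediction_scores = outputs[0]  # decoder outputs come first in the returned tuple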
[ "python_callable=mean_homem, dag=dag ) def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med =", "} # Dag definition dag = DAG( 'treino-03', description=\"Extrai dados", "med = df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade dos homens", "de idade dos homens no Titanic: {med}') branch_homem = PythonOperator(", "task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv')", "= BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def", "'female'].Age.mean() print(f'Media de idade das mulheres no Titanic: {med}') branch_mulher", "dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m = PythonOperator(", "['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1)", "= df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade dos homens no", "homens no Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag", "idade dos homens no Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem',", "from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from", "airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import datetime, timedelta import", "def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return 'branch_homem' else:", "de idade das mulheres no Titanic: {med}') branch_mulher = PythonOperator(", "PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag ) def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv')", "med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das mulheres", "* * * *' ) get_data = BashOperator( task_id='get-data', bash_command='curl", "'Retry_delay': timedelta(minutes=1) } # Dag definition dag = DAG( 'treino-03',", "* *' ) get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o", "from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import datetime, timedelta", "return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag )", "{med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >>", "pd import random # Default args definition default_args = {", "dag=dag ) get_data >> escolhe_h_m >> male_female >> [branch_homem, branch_mulher]", "mulheres\", default_args = default_args, schedule_interval='*/20 * * * *' )", "/usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m =", "def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m,", "mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media", "= pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade", "sorteia_h_m(): return random.choice(['male', 'female']) 
escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag", "mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'male'].Age.mean() print(f'Media", "else: return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag", "= PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag ) def mean_mulher(): df =", "def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'male'].Age.mean()", "def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean()", "ou mulheres\", default_args = default_args, schedule_interval='*/20 * * * *'", "datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False,", "'male': return 'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional',", "1, 'Retry_delay': timedelta(minutes=1) } # Dag definition dag = DAG(", "import DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator,", "male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def mean_homem():", "DAG( 'treino-03', description=\"Extrai dados do Titanic e calcula idade media", "description=\"Extrai dados do Titanic e calcula idade media para homens", "pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade dos", "= df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das mulheres no", "default_args = { 'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11,", "bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male',", "'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11, 29, 18, 20),", "import pandas as pd import random # Default args definition", "if value == 'male': return 'branch_homem' else: return 'branch_mulher' male_female", "= BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df", "from airflow import DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator", "'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } #", "DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator", "do Titanic e calcula idade media para homens ou mulheres\",", "'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11, 29, 18, 20), 'email':", "print(f'Media de idade das mulheres no Titanic: {med}') branch_mulher =", "Dag definition dag = DAG( 'treino-03', description=\"Extrai dados do Titanic", "= pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade", "dag=dag ) def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex", "default_args, schedule_interval='*/20 * * * *' ) get_data = BashOperator(", "'treino-03', description=\"Extrai dados do Titanic e calcula idade media para", "'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', 
python_callable=MouF, provide_context=True,", ") get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag", "python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m >> male_female >> [branch_homem,", "definition dag = DAG( 'treino-03', description=\"Extrai dados do Titanic e", "11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry':", "False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag", "18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries':", "task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return", "random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def", "import PythonOperator, BranchPythonOperator from datetime import datetime, timedelta import pandas", "# Default args definition default_args = { 'owner': 'Rafael', 'depends_on_past':", "df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'male'].Age.mean() print(f'Media de", "get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag )", "df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das mulheres no Titanic:", "*' ) get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv',", "value == 'male': return 'branch_homem' else: return 'branch_mulher' male_female =", "python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med", "idade das mulheres no Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher',", "task_id='branch_homem', python_callable=mean_homem, dag=dag ) def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med", "BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m():", "return 'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF,", "Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data", "'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag definition dag =", "PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m >> male_female", "import datetime, timedelta import pandas as pd import random #", "'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay':", "branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m", "calcula idade media para homens ou mulheres\", default_args = default_args,", "== 'male': return 'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator(", "'start_date': datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 
'email_on_failure':", "task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value ==", "Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag ) def", "branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag ) def mean_mulher(): df", "MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return 'branch_homem' else: return", "idade media para homens ou mulheres\", default_args = default_args, schedule_interval='*/20", "PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value", "20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1,", "* * *' ) get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv", "False, 'start_date': datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'],", "False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag definition dag", "df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de", "'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context):", "= { 'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11, 29,", "https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female'])", "'depends_on_past': False, 'start_date': datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>',", "PythonOperator, BranchPythonOperator from datetime import datetime, timedelta import pandas as", "python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male':", ") def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex ==", "das mulheres no Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher,", ") def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m',", "default_args = default_args, schedule_interval='*/20 * * * *' ) get_data", "== 'female'].Age.mean() print(f'Media de idade das mulheres no Titanic: {med}')", "e calcula idade media para homens ou mulheres\", default_args =", "airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime", "'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag definition", "== 'male'].Age.mean() print(f'Media de idade dos homens no Titanic: {med}')", "datetime, timedelta import pandas as pd import random # Default", "'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def", "'male'].Age.mean() print(f'Media de idade dos homens no Titanic: {med}') branch_homem", "datetime import datetime, timedelta import pandas as pd import random", "timedelta import pandas as pd import random # Default args", "dag = DAG( 'treino-03', description=\"Extrai dados do Titanic e calcula", "from 
datetime import datetime, timedelta import pandas as pd import", "BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import datetime,", "print(f'Media de idade dos homens no Titanic: {med}') branch_homem =", "return random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag )", "timedelta(minutes=1) } # Dag definition dag = DAG( 'treino-03', description=\"Extrai", "-o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m", "BranchPythonOperator from datetime import datetime, timedelta import pandas as pd", "BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df =", "'<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) }", "= default_args, schedule_interval='*/20 * * * *' ) get_data =", "args definition default_args = { 'owner': 'Rafael', 'depends_on_past': False, 'start_date':", ") def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return 'branch_homem'", "homens ou mulheres\", default_args = default_args, schedule_interval='*/20 * * *", "definition default_args = { 'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020,", "escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m')", "dag=dag ) def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex", "29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False,", "df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade dos homens no Titanic:", "Titanic e calcula idade media para homens ou mulheres\", default_args", "= PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m >>", "as pd import random # Default args definition default_args =", "value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return 'branch_homem' else: return 'branch_mulher'", "pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das", "{ 'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11, 29, 18,", "no Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag )", "random # Default args definition default_args = { 'owner': 'Rafael',", "task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m >> male_female >>", "import random # Default args definition default_args = { 'owner':", "mulheres no Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag", "airflow import DAG from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import", "dados do Titanic e calcula idade media para homens ou", "# Dag definition dag = DAG( 'treino-03', description=\"Extrai dados do", "no Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag )", "{med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag ) def mean_mulher():", "para homens ou mulheres\", default_args = default_args, 
schedule_interval='*/20 * *", "dos homens no Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem,", "pandas as pd import random # Default args definition default_args", "Default args definition default_args = { 'owner': 'Rafael', 'depends_on_past': False,", "media para homens ou mulheres\", default_args = default_args, schedule_interval='*/20 *", "schedule_interval='*/20 * * * *' ) get_data = BashOperator( task_id='get-data',", ") def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex ==", "dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return", "import BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import", "= DAG( 'treino-03', description=\"Extrai dados do Titanic e calcula idade", "provide_context=True, dag=dag ) def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med =", "= PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if" ]
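A small, hedged check of the branching logic in the DAG above: the BranchPythonOperator runs MouF and follows the task whose task_id it returns, skipping the other branch. The FakeTaskInstance below is an illustrative stand-in for Airflow's real TaskInstance, not part of the original script.

# Hypothetical local test of MouF (no scheduler involved); assumes MouF is importable from the DAG file.
class FakeTaskInstance:
    """Stand-in exposing only the xcom_pull() call that MouF uses."""

    def __init__(self, pulled_value):
        self._pulled_value = pulled_value

    def xcom_pull(self, task_ids=None):
        return self._pulled_value


def resolve_branch(choice):
    # BranchPythonOperator passes the runtime context; MouF only reads context['task_instance'].
    return MouF(task_instance=FakeTaskInstance(choice))


assert resolve_branch('male') == 'branch_homem'
assert resolve_branch('female') == 'branch_mulher'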
[ "variable.\\n\" \"In that case it will list only the system", "Dictionary of types from Tcl command, needs to be ordered", "collections.OrderedDict([ ('selection', str), ]) # Dictionary of types from Tcl", "from Tcl command, needs to be ordered , this is", "unnamed_args: :return: \"\"\" if 'selection' in args: argument = args['selection']", "# # MIT Licence # # ########################################################## from tclCommands.TclCommand import", "command to get the list of system variables example: list_sys", "to be ordered arg_names = collections.OrderedDict([ ('selection', str), ]) #", "execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\"", "\"Returns the list of the names of system variables.\\n\" \"Without", "get the list of system variables example: list_sys \"\"\" #", "system variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples':", "the list with the names of system variables.\") # Dictionary", "system parameters. \" \"As an argument use first letter or", "import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get", ":param args: :param unnamed_args: :return: \"\"\" if 'selection' in args:", "types from Tcl command, needs to be ordered , this", "args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\" if", "args needs to be ordered help = { 'main': \"Returns", "or cncjob or global.\\n\" \"Note: Use 'get_sys system variable' to", "def execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return:", "# ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl", "example: list_sys \"\"\" # List of all command aliases, to", "with the names of system variables.\") # Dictionary of types", "for current Tcl command: required = {'name','outname'} required = []", "\"As an argument use first letter or first letters from", "use first letter or first letters from the name \"", "of all command aliases, to be able use old names", "\"Outputs in Tcl Shell the list with the names of", "command, args needs to be ordered help = { 'main':", "of system variables.\\n\" \"Without an argument it will list all", "Date: 8/17/2019 # # MIT Licence # # ########################################################## from", "for Manufacturing # # File Author: <NAME> (c) # #", "ordered arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary of", "old names for backward compatibility (add_poly, add_polygon) aliases = ['list_sys',", "gerber or excellon or geometry or cncjob or global.\\n\" \"Note:", "# MIT Licence # # ########################################################## from tclCommands.TclCommand import *", "starts with that string.\\n\" \"Main categories start with: gerber or", "categories start with: gerber or excellon or geometry or cncjob", "\"In that case it will list only the system variables", "Tcl Shell the list with the names of system variables.\")", "the value and 'set_sys system variable value' to set it.\\n\",", "'list_sys gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\"", "class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the list", "(\"--\", \"Outputs in Tcl Shell the list with the names", "or geometry or cncjob or global.\\n\" \"Note: Use 'get_sys system", "if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val) # return", "\"of the system variable.\\n\" \"In that case 
it will list", "command: required = {'name','outname'} required = [] # structured help", ", this is for options like -optionname value option_types =", "get the value and 'set_sys system variable value' to set", "for backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description", "list of system variables example: list_sys \"\"\" # List of", "\" \"As an argument use first letter or first letters", "2D Post-processing for Manufacturing # # File Author: <NAME> (c)", "Licence # # ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand):", "in Tcl Shell the list with the names of system", "= {'name','outname'} required = [] # structured help for current", "or first letters from the name \" \"of the system", "the system variable.\\n\" \"In that case it will list only", "'get_sys system variable' to get the value and 'set_sys system", "# # ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\"", "% (\"--\", \"Outputs in Tcl Shell the list with the", "system variables.\") # Dictionary of types from Tcl command, needs", "be able use old names for backward compatibility (add_poly, add_polygon)", "-optionname value option_types = collections.OrderedDict([ ]) # array of mandatory", "8/17/2019 # # MIT Licence # # ########################################################## from tclCommands.TclCommand", "to get the list of system variables example: list_sys \"\"\"", "of system variables example: list_sys \"\"\" # List of all", "all the system parameters. \" \"As an argument use first", "is for options like -optionname value option_types = collections.OrderedDict([ ])", "command, needs to be ordered , this is for options", "variables example: list_sys \"\"\" # List of all command aliases,", "system variables that starts with that string.\\n\" \"Main categories start", "str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val =", "\" \"of the system variable.\\n\" \"In that case it will", "an argument it will list all the system parameters. 
\"", "case it will list only the system variables that starts", "of types from Tcl command, needs to be ordered arg_names", "the list of system variables example: list_sys \"\"\" # List", "args['selection'] return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else:", "variables.\\n\" \"Without an argument it will list all the system", "the system variables that starts with that string.\\n\" \"Main categories", "value and 'set_sys system variable value' to set it.\\n\", 'args':", "able use old names for backward compatibility (add_poly, add_polygon) aliases", "%s' % (\"--\", \"Outputs in Tcl Shell the list with", "of the names of system variables.\\n\" \"Without an argument it", "the name \" \"of the system variable.\\n\" \"In that case", "ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args):", "needs to be ordered , this is for options like", "{'name','outname'} required = [] # structured help for current command,", "# Dictionary of types from Tcl command, needs to be", "if 'selection' in args: argument = args['selection'] return str([k for", "Shell the list with the names of system variables.\") #", "Manufacturing # # File Author: <NAME> (c) # # Date:", "FlatCAM: 2D Post-processing for Manufacturing # # File Author: <NAME>", "value option_types = collections.OrderedDict([ ]) # array of mandatory options", "\"Main categories start with: gerber or excellon or geometry or", "command aliases, to be able use old names for backward", "<filename>tclCommands/TclCommandListSys.py # ########################################################## # FlatCAM: 2D Post-processing for Manufacturing #", "tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to", "excellon or geometry or cncjob or global.\\n\" \"Note: Use 'get_sys", "ordered , this is for options like -optionname value option_types", "geometry or cncjob or global.\\n\" \"Note: Use 'get_sys system variable'", "\"\"\" if 'selection' in args: argument = args['selection'] return str([k", "= args['selection'] return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))])", "= collections.OrderedDict([ ('selection', str), ]) # Dictionary of types from", "types from Tcl command, needs to be ordered arg_names =", "argument it will list all the system parameters. 
\" \"As", "# # File Author: <NAME> (c) # # Date: 8/17/2019", "to be ordered , this is for options like -optionname", "current command, args needs to be ordered help = {", "in args: argument = args['selection'] return str([k for k in", "{ 'main': \"Returns the list of the names of system", "names of system variables.\") # Dictionary of types from Tcl", "ordered help = { 'main': \"Returns the list of the", "system variables example: list_sys \"\"\" # List of all command", "<NAME> (c) # # Date: 8/17/2019 # # MIT Licence", "system variable' to get the value and 'set_sys system variable", "or global.\\n\" \"Note: Use 'get_sys system variable' to get the", "it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber',", "# Date: 8/17/2019 # # MIT Licence # # ##########################################################", "required = {'name','outname'} required = [] # structured help for", "letters from the name \" \"of the system variable.\\n\" \"In", "to get the value and 'set_sys system variable value' to", "mandatory options for current Tcl command: required = {'name','outname'} required", "\"\"\" :param args: :param unnamed_args: :return: \"\"\" if 'selection' in", "TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the list of", "compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description = '%s", "only the system variables that starts with that string.\\n\" \"Main", "the list of the names of system variables.\\n\" \"Without an", "name \" \"of the system variable.\\n\" \"In that case it", "be ordered help = { 'main': \"Returns the list of", "# array of mandatory options for current Tcl command: required", "File Author: <NAME> (c) # # Date: 8/17/2019 # #", "to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser',", "collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj']", "k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return", "unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\" if 'selection'", "in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val)", "or excellon or geometry or cncjob or global.\\n\" \"Note: Use", "collections.OrderedDict([ ]) # array of mandatory options for current Tcl", "]) # array of mandatory options for current Tcl command:", "start with: gerber or excellon or geometry or cncjob or", "with that string.\\n\" \"Main categories start with: gerber or excellon", "gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\" :param", "be ordered , this is for options like -optionname value", "Use 'get_sys system variable' to get the value and 'set_sys", "argument = args['selection'] return str([k for k in self.app.defaults.keys() if", "from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command", "options like -optionname value option_types = collections.OrderedDict([ ]) # array", "for options like -optionname value option_types = collections.OrderedDict([ ]) #", "# structured help for current command, args needs to be", "and 'set_sys system variable value' to set it.\\n\", 'args': collections.OrderedDict([", "= collections.OrderedDict([ ]) # array of mandatory options for current", "names of system variables.\\n\" \"Without an argument it will list", "structured help for current command, args 
needs to be ordered", "command, needs to be ordered arg_names = collections.OrderedDict([ ('selection', str),", "(add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description = '%s %s'", "(c) # # Date: 8/17/2019 # # MIT Licence #", "argument use first letter or first letters from the name", "from Tcl command, needs to be ordered arg_names = collections.OrderedDict([", "help for current command, args needs to be ordered help", "'set_sys system variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]),", "'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def", "aliases = ['list_sys', 'listsys'] description = '%s %s' % (\"--\",", "'selection' in args: argument = args['selection'] return str([k for k", "the system parameters. \" \"As an argument use first letter", "variables.\") # Dictionary of types from Tcl command, needs to", "str), ]) # Dictionary of types from Tcl command, needs", "list with the names of system variables.\") # Dictionary of", "Tcl command: required = {'name','outname'} required = [] # structured", "cncj'] } def execute(self, args, unnamed_args): \"\"\" :param args: :param", "['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self,", "]) # Dictionary of types from Tcl command, needs to", "str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val) # return str([*self.app.defaults])", "first letters from the name \" \"of the system variable.\\n\"", "that case it will list only the system variables that", "########################################################## # FlatCAM: 2D Post-processing for Manufacturing # # File", "of mandatory options for current Tcl command: required = {'name','outname'}", "args: :param unnamed_args: :return: \"\"\" if 'selection' in args: argument", "letter or first letters from the name \" \"of the", "an argument use first letter or first letters from the", "for current command, args needs to be ordered help =", "aliases, to be able use old names for backward compatibility", "return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val", "'%s %s' % (\"--\", \"Outputs in Tcl Shell the list", "['list_sys', 'listsys'] description = '%s %s' % (\"--\", \"Outputs in", "system variables.\\n\" \"Without an argument it will list all the", "option_types = collections.OrderedDict([ ]) # array of mandatory options for", "= [] # structured help for current command, args needs", "Author: <NAME> (c) # # Date: 8/17/2019 # # MIT", "[] # structured help for current command, args needs to", "be ordered arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary", "= ['list_sys', 'listsys'] description = '%s %s' % (\"--\", \"Outputs", "set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys", "# ########################################################## # FlatCAM: 2D Post-processing for Manufacturing # #", "('selection', str), ]) # Dictionary of types from Tcl command,", "\"Note: Use 'get_sys system variable' to get the value and", "from the name \" \"of the system variable.\\n\" \"In that", "that string.\\n\" \"Main categories start with: gerber or excellon or", "current Tcl command: required = {'name','outname'} required = [] #", "self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val) #", ":param unnamed_args: :return: \"\"\" if 'selection' in args: argument =", "it will list 
only the system variables that starts with", "= { 'main': \"Returns the list of the names of", "arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary of types", "MIT Licence # # ########################################################## from tclCommands.TclCommand import * class", "it will list all the system parameters. \" \"As an", "of types from Tcl command, needs to be ordered ,", "Tcl command, needs to be ordered , this is for", "args: argument = args['selection'] return str([k for k in self.app.defaults.keys()", "like -optionname value option_types = collections.OrderedDict([ ]) # array of", "all command aliases, to be able use old names for", "list_sys \"\"\" # List of all command aliases, to be", "to be ordered help = { 'main': \"Returns the list", "'main': \"Returns the list of the names of system variables.\\n\"", "list of the names of system variables.\\n\" \"Without an argument", "variable' to get the value and 'set_sys system variable value'", "'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self, args,", "names for backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys']", "list only the system variables that starts with that string.\\n\"", ":return: \"\"\" if 'selection' in args: argument = args['selection'] return", "# List of all command aliases, to be able use", "needs to be ordered help = { 'main': \"Returns the", "options for current Tcl command: required = {'name','outname'} required =", "\"\"\" # List of all command aliases, to be able", "variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys',", "# FlatCAM: 2D Post-processing for Manufacturing # # File Author:", "parameters. \" \"As an argument use first letter or first", "\"\"\" Tcl shell command to get the list of system", "this is for options like -optionname value option_types = collections.OrderedDict([", "Tcl command, needs to be ordered arg_names = collections.OrderedDict([ ('selection',", "shell command to get the list of system variables example:", "'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys", "with: gerber or excellon or geometry or cncjob or global.\\n\"", "use old names for backward compatibility (add_poly, add_polygon) aliases =", "that starts with that string.\\n\" \"Main categories start with: gerber", "will list all the system parameters. 
\" \"As an argument", "List of all command aliases, to be able use old", "the names of system variables.\\n\" \"Without an argument it will", "will list only the system variables that starts with that", "value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys", "first letter or first letters from the name \" \"of", "# # Date: 8/17/2019 # # MIT Licence # #", "backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description =", "'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\" :param args:", "Tcl shell command to get the list of system variables", "} def execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args:", "add_polygon) aliases = ['list_sys', 'listsys'] description = '%s %s' %", "help = { 'main': \"Returns the list of the names", "]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] }", "required = [] # structured help for current command, args", "string.\\n\" \"Main categories start with: gerber or excellon or geometry", "Post-processing for Manufacturing # # File Author: <NAME> (c) #", "# File Author: <NAME> (c) # # Date: 8/17/2019 #", "needs to be ordered arg_names = collections.OrderedDict([ ('selection', str), ])", "list all the system parameters. \" \"As an argument use", "\"Without an argument it will list all the system parameters.", "to be able use old names for backward compatibility (add_poly,", "description = '%s %s' % (\"--\", \"Outputs in Tcl Shell", "system variable.\\n\" \"In that case it will list only the", "for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys())", "= '%s %s' % (\"--\", \"Outputs in Tcl Shell the", "of system variables.\") # Dictionary of types from Tcl command,", "cncjob or global.\\n\" \"Note: Use 'get_sys system variable' to get", "'listsys'] description = '%s %s' % (\"--\", \"Outputs in Tcl", "########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell", "* class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the", "global.\\n\" \"Note: Use 'get_sys system variable' to get the value", "array of mandatory options for current Tcl command: required =", "the names of system variables.\") # Dictionary of types from", "variables that starts with that string.\\n\" \"Main categories start with:" ]